package org.pentaho.ui.xul.swt.tags;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.eclipse.jface.dialogs.IDialogConstants;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.DisposeEvent;
import org.eclipse.swt.events.DisposeListener;
import org.eclipse.swt.events.ShellAdapter;
import org.eclipse.swt.events.ShellEvent;
import org.eclipse.swt.graphics.Image;
import org.eclipse.swt.graphics.Point;
import org.eclipse.swt.graphics.Rectangle;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Control;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Event;
import org.eclipse.swt.widgets.Listener;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.swt.widgets.Widget;
import org.pentaho.ui.xul.XulComponent;
import org.pentaho.ui.xul.XulDomContainer;
import org.pentaho.ui.xul.XulSettingsManager;
import org.pentaho.ui.xul.components.XulDialogheader;
import org.pentaho.ui.xul.containers.XulDialog;
import org.pentaho.ui.xul.containers.XulRoot;
import org.pentaho.ui.xul.containers.XulWindow;
import org.pentaho.ui.xul.dom.Element;
import org.pentaho.ui.xul.swt.AbstractSwtXulContainer;
import org.pentaho.ui.xul.swt.DialogButton;
import org.pentaho.ui.xul.swt.custom.BasicDialog;
import org.pentaho.ui.xul.util.Orient;
import org.pentaho.ui.xul.util.SwtXulUtil;
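/**
 * SWT implementation of the XUL dialog element. The dialog content is hosted in a BasicDialog;
 * the standard accept/cancel/extra buttons are created on demand from the "buttons" attribute,
 * and the last size and position are persisted through the container's XulSettingsManager when
 * one is available.
 */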
public class SwtDialog extends AbstractSwtXulContainer implements XulDialog {
XulDomContainer domContainer = null;
private BasicDialog dialog = null;
private String title = null;
private String onload;
private String onclose;
private String onunload;
private XulDialogheader header;
private int height = -999;
private int width = -999;
private boolean isDialogHidden = true;
private int returnCode = -9999;
private BUTTON_ALIGN buttonAlignment;
private enum BUTTON_ALIGN {
START, CENTER, END, LEFT, RIGHT, MIDDLE
};
private Map<String, SwtButton> activeDialogButtons = new HashMap<String, SwtButton>();
private String buttonlabelaccept;
private String buttonlabelcancel;
private String buttonlabelextra1;
private String buttonlabelextra2;
private String[] buttons = new String[]{"accept", "cancel"};
private String ondialogaccept;
private String ondialogcancel;
private String ondialogextra1;
private String ondialogextra2;
private boolean resizable = false;
private boolean buttonsCreated = false;
private String appIcon;
private static final Log logger = LogFactory.getLog(SwtDialog.class);
private boolean pack;
private XulSettingsManager settingsManager;
private boolean closing;
private boolean letDialogDispose;
public SwtDialog(Element self, XulComponent parent, XulDomContainer container, String tagName) {
super(tagName);
this.setOrient(Orient.VERTICAL.toString());
this.domContainer = container;
this.setId(self.getAttributeValue("ID"));
// TODO: defer this creation until later when all attributes are assigned. For now just get the
// resizable one
String resizableStr = self.getAttributeValue("resizable");
this.setResizable(resizableStr != null && resizableStr.equals("true"));
if(self != null) {
// Extract appIcon
setAppicon(self.getAttributeValue("appicon"));
}
dialog = createDialog(parent);
Composite c = createDialogComposite();
setManagedObject(c);
settingsManager = container.getSettingsManager();
}
private Shell getParentShell(XulComponent parent){
Shell parentShell = null;
if (parent != null){
if(parent instanceof XulWindow){
// See if they registered an outer context replacement for the Window's Shell
if (domContainer.getOuterContext() != null){
parentShell = (Shell) domContainer.getOuterContext();
}
}
if(parentShell == null && parent instanceof XulRoot){
parentShell = (Shell) ((XulRoot) parent).getRootObject();
}
}
if(parentShell == null){
parentShell = new Shell(SWT.SHELL_TRIM);
}
return parentShell;
}
private BasicDialog createDialog(XulComponent parent) {
Shell parentShell = getParentShell(parent);
final BasicDialog newDialog = new BasicDialog(parentShell, true);
newDialog.getShell().setBackgroundMode(SWT.INHERIT_DEFAULT);
newDialog.getShell().addListener(SWT.Dispose, new Listener(){
public void handleEvent(Event event) {
if(!letDialogDispose){
hide();
}
}
});
if(StringUtils.isNotEmpty(this.appIcon)){
setAppicon(this.appIcon);
} else if(parentShell != null && parentShell.isDisposed() == false){
Image parentImg = parentShell.getImage();
if(parentImg != null){
newDialog.getShell().setImage(parentImg);
}
}
return newDialog;
}
private Composite createDialogComposite(){
Composite c = new Composite((Composite) dialog.getMainDialogArea(), SWT.NONE);
GridData gd = new GridData(GridData.FILL_BOTH);
gd.grabExcessVerticalSpace = true;
gd.grabExcessHorizontalSpace = true;
c.setLayoutData(gd);
return c;
}
public Shell getShell(){
return dialog != null ? dialog.getShell() : null;
}
public void dispose(){
letDialogDispose = true;
if(getShell() != null){
getShell().dispose();
}
}
public String getButtonlabelaccept() {
return buttonlabelaccept;
}
public String getButtonlabelcancel() {
return buttonlabelcancel;
}
public String getButtons() {
return StringUtils.join(buttons, ",");
}
public String getOndialogaccept() {
return ondialogaccept;
}
public String getOndialogcancel() {
return ondialogcancel;
}
public String getTitle() {
return title;
}
public void setButtonlabelaccept(String label) {
this.buttonlabelaccept = label;
}
public void setButtonlabelcancel(String label) {
this.buttonlabelcancel = label;
}
public void setButtons(){
setButtons(dialog);
buttonsCreated = true;
}
public void setButtons(String buttonList) {
if(buttonList.equals("")){ //$NON-NLS-1$
buttons = null;
} else {
List<String> newButtons = Arrays.asList(buttonList.split(",")); //$NON-NLS-1$
// Cleanup new buttons
for(int i = 0; i < newButtons.size(); i++) {
newButtons.set(i, newButtons.get(i).trim().toUpperCase());
}
String[] existingButtons = buttons;
buttons = newButtons.toArray(new String[0]);
for(String existingButton : existingButtons) {
if(!newButtons.contains(existingButton.trim().toUpperCase())) {
removeButton(existingButton);
}
}
}
if(buttonsCreated){
setButtons(dialog);
}
}
protected void removeButton(String button) {
String bName = button.trim().toUpperCase();
if(activeDialogButtons.containsKey(bName)) {
SwtButton b = activeDialogButtons.get(bName);
Button swtB = (Button)b.getManagedObject();
if(!swtB.isDisposed()) {
swtB.dispose();
}
activeDialogButtons.remove(bName);
removeChild(b);
}
}
public void setOndialogaccept(String command) {
this.ondialogaccept = command;
}
public void setOndialogcancel(String command) {
this.ondialogcancel = command;
if (ondialogcancel != null){
dialog.addShellListener(new ShellAdapter(){
public void shellClosed(ShellEvent e){
invoke(ondialogcancel);
}
});
}
}
public void setTitle(String title) {
this.title = title;
}
public void show() {
show(true);
}
public void show(boolean force) {
//createDialog();
if((force) || (!buttonsCreated)){
setButtons();
}
isDialogHidden = false;
dialog.getShell().setText(title);
int storedHeight = 0;
int storedWidth = 0;
int storedLeft = 0;
int storedTop = 0;
if(settingsManager != null){
String sWidth = settingsManager.getSetting(getId()+".Width");
String sHeight = settingsManager.getSetting(getId()+".Height");
String sTop = settingsManager.getSetting(getId()+".Top");
String sLeft = settingsManager.getSetting(getId()+".Left");
if(sWidth != null && sHeight != null){
storedWidth = Integer.parseInt(sWidth);
storedHeight = Integer.parseInt(sHeight);
}
if(sTop != null && sLeft != null){
storedLeft = Integer.parseInt(sLeft);
storedTop = Integer.parseInt(sTop);
}
}
// Because the dialog is built after the create() method is called, we
// need to ask the shell to try to re-determine an appropriate size for this dialog..
if(storedHeight > 0 && storedWidth> 0){
dialog.setHeight(storedHeight);
dialog.setWidth(storedWidth);
} else if ((height > 0) && (width > 0)){
// Don't allow the user to size the dialog smaller than is reasonable to
// layout the child components
// REMOVED: although the idea is sound, labels of great length that are meant to wrap
// will report their preferred size as quite large, which when applied is undesirable.
//Point pt = dialog.getPreferredSize();
//dialog.setHeight( (pt.y < height) ? height : pt.y);
//dialog.setWidth((pt.x < width) ? width : pt.x);
dialog.setHeight(height);
dialog.setWidth(width);
}
dialog.resizeBounds();
if(pack){
dialog.getShell().pack();
}
dialog.getMainArea().layout(true, true);
// Timing is everything - fire the onLoad events so that anyone waiting on them is notified before the dialog opens
notifyListeners(XulRoot.EVENT_ON_LOAD);
if(storedTop > 0 && storedLeft > 0){
dialog.getShell().setLocation(new Point(storedLeft, storedTop));
}
returnCode = dialog.open();
// dialog.setBlockOnOpen(true);
// dialog.getShell().setVisible(true);
}
public void setButtons(final BasicDialog d){
if (buttons == null){
d.getButtonArea().setVisible(false);
d.getButtonArea().getParent().setVisible(false);
((GridData) d.getButtonArea().getParent().getLayoutData()).exclude = true;
d.getShell().layout(true);
return;
}
for (String buttonName : buttons) {
if(StringUtils.isEmpty(buttonName)){
return;
}
DialogButton thisButton = DialogButton.valueOf(buttonName.trim().toUpperCase());
SwtButton swtButton = null;
SwtButton existingButton = (this.getDocument() != null) ? (SwtButton) this.getElementById(this.getId()+"_" + buttonName.trim().toLowerCase()) : null;
if(this.getId() != null && existingButton != null){
//existing button, just needs a new Widget parent
swtButton = existingButton;
Widget w = (Widget)existingButton.getManagedObject();
if ((w==null)|| (w.isDisposed())){
Button button = d.createButton(thisButton, false);
swtButton.setButton(button);
}
} else {
//new button needed
Button button = d.createButton(thisButton, false);
swtButton = new SwtButton(button);
swtButton.setId(this.getId()+"_" + buttonName.trim().toLowerCase());
this.addChild(swtButton);
}
switch (thisButton){
case ACCEPT:
if ((getButtonlabelaccept() != null) && (getButtonlabelaccept().trim().length() > 0)){
swtButton.setLabel(getButtonlabelaccept());
}
if ((getOndialogaccept() != null) && (getOndialogaccept().trim().length() > 0)){
swtButton.setOnclick(getOndialogaccept());
}
break;
case CANCEL:
if ((getButtonlabelcancel() != null) && (getButtonlabelcancel().trim().length() > 0)){
swtButton.setLabel(getButtonlabelcancel());
}
if ((getOndialogcancel() != null) && (getOndialogcancel().trim().length() > 0)){
swtButton.setOnclick(getOndialogcancel());
}
break;
case EXTRA1:
if ((getButtonlabelextra1() != null) && (getButtonlabelextra1().trim().length() > 0)){
swtButton.setLabel(getButtonlabelextra1());
}
if ((getOndialogextra1() != null) && (getOndialogextra1().trim().length() > 0)){
swtButton.setOnclick(getOndialogextra1());
}
break;
case EXTRA2:
if ((getButtonlabelextra2() != null) && (getButtonlabelextra2().trim().length() > 0)){
swtButton.setLabel(getButtonlabelextra2());
}
if ((getOndialogextra2() != null) && (getOndialogextra2().trim().length() > 0)){
swtButton.setOnclick(getOndialogextra2());
}
break;
}
activeDialogButtons.put(thisButton.toString().toUpperCase(), swtButton);
}
int width = 75;
for(Map.Entry<String, SwtButton> entry : activeDialogButtons.entrySet()){
width = Math.max(width, entry.getValue().button.getBounds().width);
}
GridData gd = new GridData();
gd.widthHint = width;
for(Map.Entry<String, SwtButton> entry : activeDialogButtons.entrySet()){
entry.getValue().button.setLayoutData(gd);
}
}
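// hide() cannot simply re-use the current BasicDialog: closing the underlying dialog disposes its
// shell, so the child controls are re-parented into a freshly created dialog before the old one
// is closed, allowing show() to open this XUL dialog again later.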
public void hide() {
//dialog.getShell().removeListener(SWT.Dispose, listener);
if(closing || dialog.getMainArea().isDisposed()){
return;
}
closing = true;
if(settingsManager != null){
settingsManager.storeSetting(getId()+".Left", ""+dialog.getShell().getLocation().x);
settingsManager.storeSetting(getId()+".Top", ""+dialog.getShell().getLocation().y);
settingsManager.storeSetting(getId()+".Height", ""+dialog.getShell().getSize().y);
settingsManager.storeSetting(getId()+".Width", ""+dialog.getShell().getSize().x);
try {
settingsManager.save();
} catch (IOException e) {
logger.error(e);
}
}
returnCode = IDialogConstants.CLOSE_ID;
BasicDialog newDialog = createDialog(getParent());
Control[] controlz = newDialog.getMainArea().getChildren();
for(Control c : controlz){
c.dispose();
}
Control[] controls = dialog.getMainArea().getChildren();
for(Control c : controls){
c.setParent(newDialog.getMainArea());
}
setButtons(newDialog);
setAppicon(this.appIcon);
newDialog.getShell().layout();
BasicDialog outgoingDialog = dialog;
dialog = newDialog;
outgoingDialog.close();
isDialogHidden = true;
setManagedObject(dialog.getMainArea());
closing = false;
}
public void setVisible(boolean visible) {
if (visible) {
show();
} else {
hide();
}
}
@Override
public void layout() {
setButtons();
super.layout();
for (XulComponent comp : getChildNodes()) {
if (comp instanceof XulDialogheader) {
header = (XulDialogheader) comp;
}
}
}
public int getHeight() {
return this.height;
}
public int getWidth() {
return this.width;
}
public void setHeight(int height) {
this.height = height;
}
public void setWidth(int width) {
this.width = width;
}
public String getButtonalign() {
return this.buttonAlignment.toString().toLowerCase();
}
public void setButtonalign(String align) {
this.buttonAlignment = SwtDialog.BUTTON_ALIGN.valueOf(align.toUpperCase());
}
public String getOnload() {
return onload;
}
public void setOnload(final String method) {
this.onload = method;
// @TODO This whole listener pattern needs to be replaced with a generic solution
// dialog.getShell().addListener(XulRoot.EVENT_ON_LOAD, new Listener() {
// public void handleEvent(Event e) {
// if(!StringUtils.isEmpty(method)){
// // only call this if the application is ready. Otherwise it's being handled in the main of the
// // program
// if(SwtDialog.this.domContainer.isInitialized()){
// invoke(method);
}
/**
* @deprecated This will be replaced by an agnostic listener pattern in the next version of Xul
* @param event
*/
@Deprecated
public void notifyListeners(int event) {
if (!dialog.getShell().isDisposed()) {
dialog.getShell().notifyListeners(event, new Event());
}
}
public boolean isHidden() {
return isDialogHidden;
}
public String getButtonlabelextra1() {
return buttonlabelextra1;
}
public void setButtonlabelextra1(String buttonlabelextra1) {
this.buttonlabelextra1 = buttonlabelextra1;
}
public String getButtonlabelextra2() {
return buttonlabelextra2;
}
public void setButtonlabelextra2(String buttonlabelextra2) {
this.buttonlabelextra2 = buttonlabelextra2;
}
public String getOndialogextra1() {
return ondialogextra1;
}
public void setOndialogextra1(String ondialogextra1) {
this.ondialogextra1 = ondialogextra1;
}
public String getOndialogextra2() {
return ondialogextra2;
}
public void setOndialogextra2(String ondialogextra2) {
this.ondialogextra2 = ondialogextra2;
}
public XulDomContainer getXulDomContainer() {
return this.domContainer;
}
public void setXulDomContainer(XulDomContainer xulDomContainer) {
this.domContainer = xulDomContainer;
}
public int getReturnCode(){
return returnCode;
}
public Object getRootObject() {
return dialog.getShell();
}
public String getOnclose() {
return onclose;
}
public String getOnunload() {
return onunload;
}
public void setOnclose(String onclose) {
this.onclose = onclose;
}
public void setOnunload(String onunload) {
this.onunload = onunload;
}
public void invokeLater(Runnable runnable) {
dialog.getShell().getDisplay().asyncExec(runnable);
}
public Boolean getResizable() {
return resizable;
}
public void setResizable(Boolean resizable) {
this.resizable = resizable;
}
public void setModal(Boolean modal) {
throw new RuntimeException("Not Yet Implemented");
}
public void applyParentShellIcon(){
}
public void setAppicon(String icon) {
this.appIcon = icon;
if(appIcon == null || dialog == null) {
return;
}
Display d = dialog.getShell().getDisplay();
if(d == null){
d = Display.getCurrent() != null ? Display.getCurrent() : Display.getDefault();
}
// first look for the appIcon in XUL known paths ...
Image img = SwtXulUtil.getCachedImage(appIcon, domContainer, d);
// try second method of finding the appIcon...
if (img==null){
InputStream in = null;
in = getClass().getResourceAsStream(appIcon);
if(in == null){
try{
File f = new File(icon);
if(f.exists()){
try {
in = new FileInputStream(f);
} catch (FileNotFoundException e) {}
} else {
logger.warn("could not find image: "+appIcon);
return;
}
img = new Image(dialog.getShell().getDisplay(), in);
} finally {
try{
if(in != null){
in.close();
}
} catch(IOException ignored){}
}
}
}
if(img != null && dialog != null) {
dialog.getShell().setImage(img);
}
}
public boolean isPack() {
return pack;
}
public void setPack(boolean pack) {
this.pack = pack;
}
}
|
package edu.umd.cs.piccolox.pswing;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.Graphics2D;
import java.awt.geom.AffineTransform;
import java.awt.image.BufferedImage;
import java.io.IOException;
import javax.swing.JButton;
import javax.swing.JComponent;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.RepaintManager;
import junit.framework.TestCase;
import edu.umd.cs.piccolo.util.PPaintContext;
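/**
 * Unit tests for PSwing: construction and component registration, bounds tracking, delegation of
 * painting to the wrapped Swing component, greek-threshold rendering, and attachment of the node
 * to the swing wrapper of its PSwingCanvas.
 */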
public class PSwingTest extends TestCase {
public void setUp() {
RepaintManager.setCurrentManager(new PSwingRepaintManager());
}
public void testConstructorFailsOnNullComponent() {
try {
new PSwing(null);
fail("constructing a PSwing around a null component should throw a NullPointerException");
}
catch (final NullPointerException e) {
// expected
}
}
public void testPSwingRegistersItselfWithComponent() {
final JPanel panel = new JPanel();
final PSwing pSwing = new PSwing(panel);
assertEquals(pSwing, panel.getClientProperty(PSwing.PSWING_PROPERTY));
}
public void testGetComponentReturnsValidComponent() {
final JPanel panel = new JPanel();
final PSwing pSwing = new PSwing(panel);
assertEquals(panel, pSwing.getComponent());
}
public void testPSwingResizesItselfWhenComponentIsResized() {
final boolean[] reshaped = new boolean[1];
final JPanel panel = new JPanel();
new PSwing(panel) {
public void updateBounds() {
super.updateBounds();
reshaped[0] = true;
}
};
panel.setSize(100, 100);
assertTrue(reshaped[0]);
}
public void testPSwingDelegatesPaintingToItsComponent() throws IOException {
final JPanel panel = new JPanel();
final MockPaintingPSwing pSwing = new MockPaintingPSwing(panel);
panel.setBackground(Color.RED);
panel.setPreferredSize(new Dimension(100, 100));
final BufferedImage img = pSwing.paintComponent();
assertEquals(Color.RED.getRGB(), img.getRGB(50, 50));
}
public void testHidingComponentHidesPSwing() {
final JPanel panel = new JPanel();
final PSwing pSwing = new PSwing(panel);
panel.setPreferredSize(new Dimension(100, 100));
pSwing.setBounds(0, 0, 100, 100);
panel.setVisible(false);
// Wow, do I hate this chunk of code. Turns out that the event dispatch
// thread needs time to push the component hidden method before this
// test passes
// There has to be a way of forcing this without a sleep
assertDelayedSuccess("setting component to invisible did not reflect in associated PSwing", 500,
new Predicate() {
public boolean isTrue() {
return !pSwing.getVisible();
}
});
}
public void testAddingSwingComponentToWrappedHierarchyMakesItNotDoubleBuffer() {
final JPanel panel = new JPanel();
final PSwing pSwing = new PSwing(panel);
final JComponent child = new JLabel("Test Component");
child.setDoubleBuffered(true);
panel.add(child);
assertFalse(child.isDoubleBuffered());
}
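// Polls the predicate every 50 ms until it becomes true or the given delay has elapsed, giving
// the AWT event dispatch thread time to process pending component events. Fails the test with
// the supplied message if the predicate never becomes true.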
public void assertDelayedSuccess(String message, int delay, Predicate p) {
int remainingTries = delay / 50;
while (remainingTries > 0) {
if (p.isTrue()) {
return;
}
remainingTries--;
try {
Thread.sleep(50);
}
catch (InterruptedException e) {
// do nothing
}
}
fail(message);
}
public void assertDelayedSuccess(int delay, Predicate p) {
assertDelayedSuccess("Failed asserting delayed success", delay, p);
}
private interface Predicate {
boolean isTrue();
}
public void testHidingPNodeHidesComponent() {
final JPanel panel = new JPanel();
final PSwing pSwing = new PSwing(panel);
pSwing.setVisible(false);
assertFalse(panel.isVisible());
}
public void testPaintTooSmallPaintsGreek() {
final JPanel panel = new JPanel();
panel.setBounds(0, 0, 100, 100);
final MockPaintingPSwing pSwing = new MockPaintingPSwing(panel);
BufferedImage image = new BufferedImage(100, 100, BufferedImage.TYPE_INT_RGB);
Graphics2D graphics = image.createGraphics();
graphics.setTransform(AffineTransform.getScaleInstance(0.01, 0.01));
PPaintContext paintContext = new PPaintContext(graphics);
pSwing.paint(paintContext);
assertTrue(pSwing.isPaintedGreek());
assertFalse(pSwing.isPaintedComponent());
}
public void testPaintBigPaintsComponent() {
final JPanel panel = new JPanel();
panel.setBounds(0, 0, 100, 100);
final MockPaintingPSwing pSwing = new MockPaintingPSwing(panel);
BufferedImage image = new BufferedImage(100, 100, BufferedImage.TYPE_INT_RGB);
Graphics2D graphics = image.createGraphics();
graphics.setTransform(AffineTransform.getScaleInstance(5, 5));
PPaintContext paintContext = new PPaintContext(graphics);
pSwing.paint(paintContext);
assertFalse(pSwing.isPaintedGreek());
assertTrue(pSwing.isPaintedComponent());
}
public void testGreekThresholdIsHonoured() {
final JPanel panel = new JPanel();
panel.setBounds(0, 0, 100, 100);
final MockPaintingPSwing pSwing = new MockPaintingPSwing(panel);
pSwing.setGreekThreshold(2);
BufferedImage image = new BufferedImage(100, 100, BufferedImage.TYPE_INT_RGB);
Graphics2D graphics = image.createGraphics();
PPaintContext paintContext = new PPaintContext(graphics);
pSwing.paint(paintContext);
assertTrue(pSwing.isPaintedGreek());
assertFalse(pSwing.isPaintedComponent());
}
public void testGreekThresholdIsPersisted() {
final JPanel panel = new JPanel();
final MockPaintingPSwing pSwing = new MockPaintingPSwing(panel);
pSwing.setGreekThreshold(2);
assertEquals(2, pSwing.getGreekThreshold(), Double.MIN_VALUE);
pSwing.setGreekThreshold(0.5);
assertEquals(0.5, pSwing.getGreekThreshold(), Double.MIN_VALUE);
}
public void testAssertSettingJLabelWidthTooSmallGrowsIt() {
final JLabel label = new JLabel("Hello");
PSwingCanvas canvas = new PSwingCanvas();
canvas.setBounds(0, 0, 100, 100);
final MockPaintingPSwing swing = new MockPaintingPSwing(label);
assertDelayedSuccess(500,
new Predicate() {
public boolean isTrue() {
return label.getMinimumSize().getWidth() != 0;
}
});
swing.setWidth(10);
canvas.getLayer().addChild(swing);
canvas.doLayout();
// During paint, the graphics context is used to determine the font's
// display size and hence the minimum size of the JLabel.
swing.paint();
assertFalse(10 == swing.getWidth());
}
public void testAssertSettingJButtonWidthTooSmallGrowsIt() {
JButton label = new JButton("Hello");
PSwingCanvas canvas = new PSwingCanvas();
canvas.setBounds(0, 0, 100, 100);
MockPaintingPSwing swing = new MockPaintingPSwing(label);
assertFalse(label.getMinimumSize().getWidth() == 0);
swing.setWidth(10);
canvas.getLayer().addChild(swing);
canvas.doLayout();
// During paint, the graphics context is used to determine the font's
// display size and hence the minimum size of the JButton.
swing.paint();
assertFalse(10 == swing.getWidth());
}
public void testPSwingAttachesItselfToItsCanvasWhenAddedToItsSceneGraph() {
PSwingCanvas canvas1 = new PSwingCanvas();
PSwing label = new PSwing(new JLabel("Hello"));
assertEquals(0, canvas1.getSwingWrapper().getComponentCount());
canvas1.getLayer().addChild(label);
assertEquals(1, canvas1.getSwingWrapper().getComponentCount());
}
public void testPSwingRemovesItselfFromItsCanvasWhenRemovedFromScene() {
PSwingCanvas canvas1 = new PSwingCanvas();
PSwing label = new PSwing(new JLabel("Hello"));
canvas1.getLayer().addChild(label);
assertEquals(1, canvas1.getSwingWrapper().getComponentCount());
label.removeFromParent();
assertEquals(0, canvas1.getSwingWrapper().getComponentCount());
}
public void testPSwingReattachesItselfWhenMovedFromCanvasToCanvas() {
PSwingCanvas canvas1 = new PSwingCanvas();
PSwingCanvas canvas2 = new PSwingCanvas();
PSwing label = new PSwing(new JLabel("Hello"));
canvas1.getLayer().addChild(label);
canvas2.getLayer().addChild(label);
assertEquals(0, canvas1.getSwingWrapper().getComponentCount());
assertEquals(1, canvas2.getSwingWrapper().getComponentCount());
}
public void testPSwingRegistersWithCanvasThroughoutItsLifeCycle() {
PSwingCanvas canvas = new PSwingCanvas();
PSwing label = new PSwing(new JLabel("Hello"));
canvas.getLayer().addChild(label);
assertEquals(1, canvas.getSwingWrapper().getComponentCount());
label.removeFromParent();
assertEquals(0, canvas.getSwingWrapper().getComponentCount());
canvas.getLayer().addChild(label);
assertEquals(1, canvas.getSwingWrapper().getComponentCount());
}
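// Test double that records whether PSwing took the "greek" placeholder path or painted the real
// component, and exposes helpers for painting into an offscreen BufferedImage.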
public class MockPaintingPSwing extends PSwing {
private boolean paintedGreek;
private boolean paintedComponent;
public MockPaintingPSwing(JComponent component) {
super(component);
}
public void paintOnto(BufferedImage image) {
PPaintContext paintContext = new PPaintContext(image.createGraphics());
paint(paintContext);
}
public BufferedImage paint() {
BufferedImage image = new BufferedImage((int) getWidth(), (int) getHeight(), BufferedImage.TYPE_INT_RGB);
paintOnto(image);
return image;
}
public BufferedImage paintComponent() {
BufferedImage image = new BufferedImage((int) getWidth(), (int) getHeight(), BufferedImage.TYPE_INT_RGB);
paintComponentOnto(image);
return image;
}
public void paintComponentOnto(BufferedImage image) {
paint(image.createGraphics());
}
public void paint(Graphics2D paintContext) {
super.paint(paintContext);
paintedComponent = true;
}
public void paintAsGreek(Graphics2D paintContext) {
super.paintAsGreek(paintContext);
paintedGreek = true;
}
public boolean isPaintedGreek() {
return paintedGreek;
}
public boolean isPaintedComponent() {
return paintedComponent;
}
}
}
|
package org.pentaho.ui.xul.swt.tags;
import org.eclipse.swt.SWT;
import org.eclipse.swt.custom.CTabFolder;
import org.eclipse.swt.custom.CTabFolder2Listener;
import org.eclipse.swt.custom.CTabFolderEvent;
import org.eclipse.swt.custom.CTabItem;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.graphics.Color;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Control;
import org.pentaho.ui.xul.XulComponent;
import org.pentaho.ui.xul.XulDomContainer;
import org.pentaho.ui.xul.XulException;
import org.pentaho.ui.xul.components.XulTabpanel;
import org.pentaho.ui.xul.containers.XulTabbox;
import org.pentaho.ui.xul.containers.XulTabpanels;
import org.pentaho.ui.xul.containers.XulTabs;
import org.pentaho.ui.xul.dom.Element;
import org.pentaho.ui.xul.swt.AbstractSwtXulContainer;
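/**
 * SWT implementation of the XUL tabbox container. A CTabFolder holds one CTabItem per visible
 * tab; the tabs and tabpanels children are kept in sync with the folder, selection changes are
 * surfaced as "selectedIndex" property change events, and an optional onclose handler can veto
 * closing a tab.
 */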
public class SwtTabbox extends AbstractSwtXulContainer implements XulTabbox {
private CTabFolder tabFolder;
private SwtTabpanels panels;
private SwtTabs tabs;
private int selectedIndex = -1;
private boolean closable;
private String onclose;
private XulDomContainer domContainer;
// used to prevent recursing.
private boolean suppressRemoveEvents;
public SwtTabbox(Element self, XulComponent parent, XulDomContainer domContainer, String tagName) {
super("tabbox");
int style = SWT.MULTI;
if (self.getAttributeValue("closable") != null && self.getAttributeValue("closable").equals("true")) {
style |= SWT.CLOSE;
}
this.domContainer = domContainer;
tabFolder = new CTabFolder((Composite) parent.getManagedObject(), style);
tabFolder.setLayoutData(new GridData(GridData.FILL_BOTH));
tabFolder.setSimple(false);
tabFolder.setUnselectedImageVisible(true);
tabFolder.setUnselectedCloseVisible(true);
tabFolder.setBorderVisible(true);
tabFolder.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent arg0) {
int prevVal = selectedIndex;
selectedIndex = tabFolder.getSelectionIndex();
SwtTabbox.this.changeSupport.firePropertyChange("selectedIndex", prevVal, selectedIndex);
}
});
tabFolder.addCTabFolder2Listener(new CTabFolder2Listener() {
public void close(CTabFolderEvent arg0) {
if (onclose != null) {
try {
int pos = 0;
for (int i = 0; i < tabFolder.getItems().length; i++) {
if (tabFolder.getItems()[i] == arg0.item) {
pos = i;
break;
}
}
Boolean returnVal = (Boolean) SwtTabbox.this.domContainer.invoke(onclose, new Object[] { pos });
if (returnVal == true) {
remove(pos);
} else {
arg0.doit = false;
}
} catch (XulException e) {
e.printStackTrace();
}
} else {
remove(tabFolder.getSelectionIndex());
}
}
public void maximize(CTabFolderEvent arg0) {
}
public void minimize(CTabFolderEvent arg0) {
}
public void restore(CTabFolderEvent arg0) {
}
public void showList(CTabFolderEvent arg0) {
}
});
// Set a small vertical gradient
tabFolder.setSelectionBackground(new Color[] {
tabFolder.getDisplay().getSystemColor(SWT.COLOR_WIDGET_NORMAL_SHADOW),
tabFolder.getDisplay().getSystemColor(SWT.COLOR_WIDGET_LIGHT_SHADOW), }, new int[] { 55, }, true);
setManagedObject(tabFolder);
}
private void remove(int pos) {
suppressRemoveEvents = true;
this.tabs.removeChild(this.tabs.getChildNodes().get(pos));
this.panels.removeChild(this.panels.getChildNodes().get(pos));
if (tabs.getChildNodes().size() == 0) { // last one doesn't fire selection event. Manually do that here
setSelectedIndex(-1);
}
suppressRemoveEvents = false;
}
@Override
public void addChild(Element ele) {
super.addChild(ele);
if (ele instanceof SwtTabs) {
this.tabs = (SwtTabs) ele;
} else if (ele instanceof SwtTabpanels) {
this.panels = (SwtTabpanels) ele;
}
}
public int getSelectedIndex() {
return tabFolder.getSelectionIndex();
}
public XulTabpanel getSelectedPanel() {
return panels.getTabpanelByIndex(getSelectedIndex());
}
public XulTabpanels getTabpanels() {
return panels;
}
public XulTabs getTabs() {
return tabs;
}
public void setSelectedIndex(int index) {
int prevVal = selectedIndex;
selectedIndex = index;
SwtTabbox.this.changeSupport.firePropertyChange("selectedIndex", prevVal, selectedIndex);
if (tabFolder.getItemCount() > 0) { // component instantiated
tabFolder.setSelection(selectedIndex);
// Programmatic setting of selectedIndex does not fire the listener.
int sel = tabFolder.getSelectionIndex();
}
}
@Override
public void layout() {
CTabItem[] t = tabFolder.getItems();
for (int i = 0; i < t.length; i++) {
t[i].dispose();
}
for (int i = 0; i < tabs.getChildNodes().size(); i++) {
int style = SWT.NONE;
int tabIndex = 0;
if (isClosable()) {
style = SWT.CLOSE;
}
SwtTab tab = (SwtTab) tabs.getChildNodes().get(i);
if (tab.isVisible()) {
CTabItem item = new CTabItem(tabFolder, style);
item.setText(tabs.getTabByIndex(i).getLabel());
// There is a chance that the tabs have been added but the tab panels are
// not there yet. In that case we need to break out of this layout routine.
if (panels.getChildNodes().size() <= i) {
break;
}
item.setControl((Control) panels.getTabpanelByIndex(i).getManagedObject());
tabFolder.getItem(tabIndex++).getControl().setEnabled(!tabs.getTabByIndex(i).isDisabled());
}
}
tabFolder.layout(true, true);
if (selectedIndex < 0 && tabFolder.getItemCount() > 0) {
selectedIndex = 0;
}
setSelectedIndex(selectedIndex);
}
public void setTabDisabledAt(boolean flag, int pos) {
tabFolder.getItem(pos).getControl().setEnabled(!flag);
}
public void setTabVisibleAt(boolean flag, int pos) {
tabFolder.getItem(pos).getControl().setVisible(flag);
}
public void updateTabState() {
for (int i = 0; i < tabs.getChildNodes().size(); i++) {
tabFolder.getItem(i).setText("" + tabs.getTabByIndex(i).getLabel());
tabFolder.getItem(i).getControl().setEnabled(!tabs.getTabByIndex(i).isDisabled());
}
}
public void removeTab(int idx) {
if(suppressRemoveEvents){
return;
}
if (tabFolder.getItemCount() > idx) {
tabFolder.getItem(idx).dispose();
}
}
public void removeTabpanel(int idx) {
if(suppressRemoveEvents){
return;
}
if (tabFolder.getItemCount() > idx) {
tabFolder.getItem(idx).dispose();
}
}
public void addTab(int idx) {
int style = SWT.NONE;
if (isClosable()) {
style = SWT.CLOSE;
}
CTabItem item = new CTabItem(tabFolder, style);
String lbl = tabs.getTabByIndex(idx).getLabel();
if (lbl != null) {
item.setText(lbl);
}
//may have been added after panel
//addTabpanel(idx);
if (selectedIndex < 0) {
selectedIndex = 0;
}
setSelectedIndex(selectedIndex);
}
public void addTabpanel(int idx) {
//not sure if the tab has been added first, ignore if not
if (tabFolder.getItemCount() <= idx || panels.getChildNodes().size() <= idx) {
return;
}
CTabItem item = tabFolder.getItem(idx);
Control control = (Control) panels.getTabpanelByIndex(idx).getManagedObject();
if (control.getParent() != tabFolder) {
control.setParent(tabFolder);
}
item.setControl(control);
item.getControl().setEnabled(!tabs.getTabByIndex(idx).isDisabled());
}
public void setClosable(boolean flag) {
this.closable = flag;
}
public boolean isClosable() {
return this.closable;
}
public void setOnclose(String command) {
this.onclose = command;
}
}
|
package org.wiztools.restclient;
import static org.junit.Assert.assertEquals;
import org.junit.*;
import org.wiztools.commons.MultiValueMap;
import org.wiztools.commons.MultiValueMapLinkedHashSet;
import org.wiztools.restclient.util.HttpUtil;
import org.wiztools.restclient.util.Util;
/**
*
* @author subWiz
*/
public class UtilTest {
public UtilTest() {
}
@BeforeClass
public static void setUpClass() throws Exception {
}
@AfterClass
public static void tearDownClass() throws Exception {
}
@Before
public void setUp() {
}
@After
public void tearDown() {
}
/**
* Test of parameterEncode method, of class Util.
*/
@Test
public void testParameterEncode() {
System.out.println("parameterEncode");
MultiValueMap<String, String> params = new MultiValueMapLinkedHashSet<String, String>();
params.put("q", "r1");
params.put("q", "r2");
String expResult = "q=r1&q=r2";
String result = Util.parameterEncode(params);
assertEquals(expResult, result);
}
/**
* Test of getStatusCodeFromStatusLine method, of class Util.
*/
/*@Test
public void testGetStatusCodeFromStatusLine() {
System.out.println("getStatusCodeFromStatusLine");
String statusLine = "";
int expResult = 0;
int result = Util.getStatusCodeFromStatusLine(statusLine);
assertEquals(expResult, result);
// TODO review the generated test code and remove the default call to fail.
fail("The test case is a prototype.");
}*/
/**
* Test of getFormattedContentType method, of class Util.
*/
/*@Test
public void testGetFormattedContentType() {
System.out.println("getFormattedContentType");
String contentType = "";
String charset = "";
String expResult = "";
String result = Util.getFormattedContentType(contentType, charset);
assertEquals(expResult, result);
// TODO review the generated test code and remove the default call to fail.
fail("The test case is a prototype.");
}*/
/**
* Test of getCharsetFromContentType method, of class Util.
*/
@Test
public void testGetCharsetFromContentType() {
System.out.println("getCharsetFromContentType");
String contentType = "Content-type: text/html; charset=UTF-8";
String expResult = "UTF-8";
String result = HttpUtil.getCharsetFromContentType(contentType);
assertEquals(expResult, result);
// when charset is not available, return null:
contentType = "Content-type: text/html";
expResult = null;
result = HttpUtil.getCharsetFromContentType(contentType);
assertEquals(expResult, result);
}
@Test
public void testGetMimeFromContentType() {
System.out.println("getMimeFromContentType");
String contentType = "application/xml;charset=UTF-8";
String expResult = "application/xml";
String result = HttpUtil.getMimeFromContentType(contentType);
assertEquals(expResult, result);
}
}
|
package blue.lapis.pore.impl.event;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import blue.lapis.pore.PoreTests;
import com.google.common.base.Objects;
import com.google.common.collect.ImmutableCollection;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;
import com.google.common.reflect.ClassPath;
import org.apache.commons.lang.StringUtils;
import org.bukkit.event.Event;
import org.junit.BeforeClass;
import org.junit.Test;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.Set;
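/**
 * Reflection-based checks over every Pore event implementation: naming and package conventions
 * relative to the wrapped Bukkit event, presence of a Sponge handle field, getter and null-checked
 * constructor, and overrides for all relevant Bukkit event methods.
 */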
public class PoreEventTest {
private static final String BUKKIT_PACKAGE = "org.bukkit.event";
private static final String PORE_PACKAGE = "blue.lapis.pore.impl.event";
private static ImmutableCollection<Class<?>> bukkitEvents;
private static ImmutableCollection<Class<?>> poreEvents;
@BeforeClass
public static void findEvents() throws Exception {
ClassPath classPath = ClassPath.from(ClassLoader.getSystemClassLoader());
ImmutableCollection.Builder<Class<?>> builder = ImmutableSet.builder();
for (ClassPath.ClassInfo info : classPath.getTopLevelClassesRecursive(BUKKIT_PACKAGE)) {
Class<?> event = info.load();
if (Event.class.isAssignableFrom(event) && !Modifier.isAbstract(event.getModifiers())) {
builder.add(event);
}
}
bukkitEvents = builder.build();
builder = ImmutableSet.builder();
for (ClassPath.ClassInfo info : classPath.getTopLevelClassesRecursive(PORE_PACKAGE)) {
Class<?> type = info.load();
if (Event.class.isAssignableFrom(type)) {
builder.add(type);
}
}
poreEvents = builder.build();
}
@Test
public void checkNames() {
for (Class<?> eventImpl : poreEvents) {
Class<?> bukkitEvent = eventImpl.getSuperclass();
String poreName = StringUtils.removeStart(eventImpl.getName(), PORE_PACKAGE + '.');
String porePackage = StringUtils.substringBeforeLast(poreName, ".");
poreName = StringUtils.substringAfterLast(poreName, ".");
String bukkitName = StringUtils.removeStart(bukkitEvent.getName(), BUKKIT_PACKAGE + '.');
String bukkitPackage = StringUtils.substringBeforeLast(bukkitName, ".");
bukkitName = StringUtils.substringAfterLast(bukkitName, ".");
String expectedName = "Pore" + bukkitName;
assertTrue(poreName + " should be called " + expectedName, poreName.equals(expectedName));
assertTrue(poreName + " is in wrong package: should be in " + PORE_PACKAGE + '.' + bukkitPackage,
porePackage.equals(bukkitPackage));
}
}
@Test
public void findUnimplementedEvents() {
Set<Class<?>> events = Sets.newLinkedHashSet(bukkitEvents);
for (Class<?> eventImpl : poreEvents) {
events.remove(eventImpl.getSuperclass());
}
if (!events.isEmpty()) {
for (Class<?> event : events) {
String bukkitPackage = StringUtils.removeStart(event.getPackage().getName(), BUKKIT_PACKAGE + '.');
PoreTests.getLogger().warn(bukkitPackage + ": Pore" + event.getSimpleName() + " is missing");
}
}
}
private static void checkSpongeEvent(Class<?> eventImpl, Class<?> type) {
assertTrue(eventImpl.getSimpleName() + ": " + type.getSimpleName() + " is not a sponge event",
org.spongepowered.api.util.event.Event.class.isAssignableFrom(type));
}
@Test
public void checkHandleGetter() {
for (Class<?> eventImpl : poreEvents) {
try {
Method method = eventImpl.getMethod("getHandle");
checkSpongeEvent(eventImpl, method.getReturnType());
} catch (NoSuchMethodException ignored) {
fail(eventImpl.getSimpleName() + ": missing getHandle() method (handle getter)");
}
}
}
@Test
public void checkHandleField() {
for (Class<?> eventImpl : poreEvents) {
try {
Field field = eventImpl.getDeclaredField("handle");
checkSpongeEvent(eventImpl, field.getType());
} catch (NoSuchFieldException e) {
fail(eventImpl.getSimpleName() + ": missing handle field");
}
}
}
@Test
public void checkConstructor() throws Throwable {
events:
for (Class<?> eventImpl : poreEvents) {
for (Constructor<?> constructor : eventImpl.getConstructors()) {
Class<?>[] parameters = constructor.getParameterTypes();
if (parameters.length == 1) {
Class<?> handle = parameters[0];
if (org.spongepowered.api.util.event.Event.class.isAssignableFrom(handle)) {
// Check for null check
try {
constructor.newInstance(new Object[]{null});
} catch (InvocationTargetException e) {
Throwable cause = e.getCause();
if (cause != null) {
if (cause instanceof NullPointerException
&& Objects.equal(cause.getMessage(), "handle")) {
continue events;
}
throw cause;
}
throw e;
}
fail(eventImpl.getSimpleName() + ": missing null-check for handle");
}
}
}
fail(eventImpl.getSimpleName() + ": missing handle constructor");
}
}
@Test
public void checkImplementedMethods() {
for (Class<?> eventImpl : poreEvents) {
Class<?> bukkitEvent = eventImpl.getSuperclass();
for (Method method : bukkitEvent.getMethods()) {
int modifiers = method.getModifiers();
if (Modifier.isStatic(modifiers) || isDefault(method)
|| method.getDeclaringClass() == Event.class || method.getDeclaringClass() == Object.class
|| method.getName().equals("getHandlers") || method.getName().startsWith("_INVALID_")) {
continue;
}
try {
eventImpl.getDeclaredMethod(method.getName(), method.getParameterTypes());
} catch (NoSuchMethodException e) {
fail(eventImpl.getSimpleName() + ": should override method " + method);
}
}
}
}
// Taken from JDK8 for compatibility with older Java versions
private static boolean isDefault(Method method) {
// Default methods are public non-abstract instance methods declared in an interface.
return ((method.getModifiers() & (Modifier.ABSTRACT | Modifier.PUBLIC | Modifier.STATIC)) == Modifier.PUBLIC)
&& method.getDeclaringClass().isInterface();
}
@Test
public void checkInvalidMethods() {
for (Class<?> eventImpl : poreEvents) {
for (Method method : eventImpl.getDeclaredMethods()) {
if (method.getName().startsWith("_INVALID_")) {
fail(eventImpl.getSimpleName() + ": shouldn't override _INVALID_ method " + method);
}
}
}
}
}
|
package org.postgresql.test.jdbc2;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
import org.postgresql.Driver;
import org.postgresql.PGEnvironment;
import org.postgresql.PGProperty;
import org.postgresql.test.TestUtil;
import org.postgresql.util.URLCoder;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import uk.org.webcompere.systemstubs.environment.EnvironmentVariables;
import uk.org.webcompere.systemstubs.jupiter.SystemStubsExtension;
import uk.org.webcompere.systemstubs.properties.SystemProperties;
import uk.org.webcompere.systemstubs.resource.Resources;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.lang.reflect.Method;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Path;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Properties;
/*
* Tests the dynamically created class org.postgresql.Driver
*
*/
@ExtendWith(SystemStubsExtension.class)
public class DriverTest {
@Test
public void urlIsNotForPostgreSQL() throws SQLException {
Driver driver = new Driver();
assertNull(driver.connect("jdbc:otherdb:database", new Properties()));
}
/**
* According to the javadoc of java.sql.Driver.connect(...), calling connect with a {@code null}
* url results in SQLException
*/
@Test
public void urlIsNull() throws SQLException {
Driver driver = new Driver();
assertThrows(SQLException.class, () -> driver.connect(null, new Properties()));
}
/*
* This tests the acceptsURL() method with a couple of well and poorly formed jdbc urls.
*/
@Test
public void testAcceptsURL() throws Exception {
TestUtil.initDriver(); // Set up log levels, etc.
// Load the driver (note clients should never do it this way!)
org.postgresql.Driver drv = new org.postgresql.Driver();
assertNotNull(drv);
// These are always correct
verifyUrl(drv, "jdbc:postgresql:test", "localhost", "5432", "test");
verifyUrl(drv, "jdbc:postgresql://localhost/test", "localhost", "5432", "test");
verifyUrl(drv, "jdbc:postgresql://localhost,locahost2/test", "localhost,locahost2", "5432,5432", "test");
verifyUrl(drv, "jdbc:postgresql://localhost:5433,locahost2:5434/test", "localhost,locahost2", "5433,5434", "test");
verifyUrl(drv, "jdbc:postgresql://[::1]:5433,:5434,[::1]/test", "[::1],localhost,[::1]", "5433,5434,5432", "test");
verifyUrl(drv, "jdbc:postgresql://localhost/test?port=8888", "localhost", "8888", "test");
verifyUrl(drv, "jdbc:postgresql://localhost:5432/test", "localhost", "5432", "test");
verifyUrl(drv, "jdbc:postgresql://localhost:5432/test?dbname=test2", "localhost", "5432", "test2");
verifyUrl(drv, "jdbc:postgresql://127.0.0.1/anydbname", "127.0.0.1", "5432", "anydbname");
verifyUrl(drv, "jdbc:postgresql://127.0.0.1:5433/hidden", "127.0.0.1", "5433", "hidden");
verifyUrl(drv, "jdbc:postgresql://127.0.0.1:5433/hidden?port=7777", "127.0.0.1", "7777", "hidden");
verifyUrl(drv, "jdbc:postgresql://[::1]:5740/db", "[::1]", "5740", "db");
verifyUrl(drv, "jdbc:postgresql://[::1]:5740/my%20data%23base%251?loggerFile=C%3A%5Cdir%5Cfile.log", "[::1]", "5740", "my data#base%1");
// tests for service syntax
URL urlFileProps = getClass().getResource("/pg_service/pgservicefileProps.conf");
assertNotNull(urlFileProps);
Resources.with(
new SystemProperties(PGEnvironment.ORG_POSTGRESQL_PGSERVICEFILE.getName(), urlFileProps.getFile())
).execute(() -> {
// correct cases
verifyUrl(drv, "jdbc:postgresql://?service=driverTestService1", "test-host1", "5444", "testdb1");
verifyUrl(drv, "jdbc:postgresql://?service=driverTestService1&host=other-host", "other-host", "5444", "testdb1");
verifyUrl(drv, "jdbc:postgresql:///?service=driverTestService1", "test-host1", "5444", "testdb1");
verifyUrl(drv, "jdbc:postgresql:///?service=driverTestService1&port=3333&dbname=other-db", "test-host1", "3333", "other-db");
verifyUrl(drv, "jdbc:postgresql://localhost:5432/test?service=driverTestService1", "localhost", "5432", "test");
verifyUrl(drv, "jdbc:postgresql://localhost:5432/test?port=7777&dbname=other-db&service=driverTestService1", "localhost", "7777", "other-db");
verifyUrl(drv, "jdbc:postgresql://[::1]:5740/?service=driverTestService1", "[::1]", "5740", "testdb1");
verifyUrl(drv, "jdbc:postgresql://:5740/?service=driverTestService1", "localhost", "5740", "testdb1");
verifyUrl(drv, "jdbc:postgresql://[::1]/?service=driverTestService1", "[::1]", "5432", "testdb1");
verifyUrl(drv, "jdbc:postgresql://localhost/?service=driverTestService2", "localhost", "5432", "testdb1");
// fail cases
assertFalse(drv.acceptsURL("jdbc:postgresql://?service=driverTestService2"));
});
// Badly formatted url's
assertFalse(drv.acceptsURL("jdbc:postgres:test"));
assertFalse(drv.acceptsURL("jdbc:postgresql:/test"));
assertFalse(drv.acceptsURL("jdbc:postgresql:
assertFalse(drv.acceptsURL("jdbc:postgresql:///?service=my data#base%1"));
assertFalse(drv.acceptsURL("jdbc:postgresql://[::1]:5740/my data#base%1"));
assertFalse(drv.acceptsURL("jdbc:postgresql://localhost/dbname?loggerFile=C%3A%5Cdir%5Cfile.%log"));
assertFalse(drv.acceptsURL("postgresql:test"));
assertFalse(drv.acceptsURL("db"));
assertFalse(drv.acceptsURL("jdbc:postgresql://localhost:5432a/test"));
assertFalse(drv.acceptsURL("jdbc:postgresql://localhost:500000/test"));
assertFalse(drv.acceptsURL("jdbc:postgresql://localhost:0/test"));
assertFalse(drv.acceptsURL("jdbc:postgresql://localhost:-2/test"));
// failover urls
verifyUrl(drv, "jdbc:postgresql://localhost,127.0.0.1:5432/test", "localhost,127.0.0.1",
"5432,5432", "test");
verifyUrl(drv, "jdbc:postgresql://localhost:5433,127.0.0.1:5432/test", "localhost,127.0.0.1",
"5433,5432", "test");
verifyUrl(drv, "jdbc:postgresql://[::1],[::1]:5432/db", "[::1],[::1]", "5432,5432", "db");
verifyUrl(drv, "jdbc:postgresql://[::1]:5740,127.0.0.1:5432/db", "[::1],127.0.0.1", "5740,5432",
"db");
}
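// Calls the driver's parseURL(String, Properties) method via reflection and compares the
// resolved host list, port list and database name against the expected values.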
private void verifyUrl(Driver drv, String url, String hosts, String ports, String dbName)
throws Exception {
assertTrue(drv.acceptsURL(url), url);
Method parseMethod =
drv.getClass().getDeclaredMethod("parseURL", String.class, Properties.class);
parseMethod.setAccessible(true);
Properties p = (Properties) parseMethod.invoke(drv, url, null);
assertEquals(dbName, p.getProperty(PGProperty.PG_DBNAME.getName()), url);
assertEquals(hosts, p.getProperty(PGProperty.PG_HOST.getName()), url);
assertEquals(ports, p.getProperty(PGProperty.PG_PORT.getName()), url);
}
/**
* Tests the connect method by connecting to the test database.
*/
@Test
public void testConnect() throws Exception {
TestUtil.initDriver(); // Set up log levels, etc.
// Test with the url, username & password
Connection con =
DriverManager.getConnection(TestUtil.getURL(), TestUtil.getUser(), TestUtil.getPassword());
assertNotNull(con);
con.close();
// Test with the username in the url
con = DriverManager.getConnection(
TestUtil.getURL()
+ "&user=" + URLCoder.encode(TestUtil.getUser())
+ "&password=" + URLCoder.encode(TestUtil.getPassword()));
assertNotNull(con);
con.close();
// Test with failover url
}
/**
* Tests the connect method by connecting to the test database.
*/
@Test
public void testConnectService() throws Exception {
TestUtil.initDriver(); // Set up log levels, etc.
String wrongPort = "65536";
// Create temporary pg_service.conf file
Path tempDirWithPrefix = Files.createTempDirectory("junit");
Path tempFile = Files.createTempFile(tempDirWithPrefix, "pg_service", "conf");
try {
// Write service section
String testService1 = "testService1"; // with correct port
String testService2 = "testService2"; // with wrong port
try (PrintStream ps = new PrintStream(Files.newOutputStream(tempFile))) {
ps.printf("[%s]%nhost=%s%nport=%s%ndbname=%s%nuser=%s%npassword=%s%n", testService1, TestUtil.getServer(), TestUtil.getPort(), TestUtil.getDatabase(), TestUtil.getUser(), TestUtil.getPassword());
ps.printf("[%s]%nhost=%s%nport=%s%ndbname=%s%nuser=%s%npassword=%s%n", testService2, TestUtil.getServer(), wrongPort, TestUtil.getDatabase(), TestUtil.getUser(), TestUtil.getPassword());
}
// consume service
Resources.with(
new EnvironmentVariables(PGEnvironment.PGSERVICEFILE.getName(), tempFile.toString(), PGEnvironment.PGSYSCONFDIR.getName(), ""),
new SystemProperties(PGEnvironment.ORG_POSTGRESQL_PGSERVICEFILE.getName(), "", "user.home", "/tmp/dir-non-existent")
).execute(() -> {
// testing that properties overriding priority is correct (POSITIVE cases)
// service=correct port
Connection con = DriverManager.getConnection(String.format("jdbc:postgresql://?service=%s", testService1));
assertNotNull(con);
con.close();
// service=wrong port; Properties=correct port
Properties info = new Properties();
info.setProperty("PGPORT", String.valueOf(TestUtil.getPort()));
con = DriverManager.getConnection(String.format("jdbc:postgresql://?service=%s", testService2), info);
assertNotNull(con);
con.close();
// service=wrong port; Properties=wrong port; URL port=correct
info.setProperty("PGPORT", wrongPort);
con = DriverManager.getConnection(String.format("jdbc:postgresql://:%s/?service=%s", TestUtil.getPort(), testService2), info);
assertNotNull(con);
con.close();
// service=wrong port; Properties=wrong port; URL port=wrong; URL argument=correct port
con = DriverManager.getConnection(String.format("jdbc:postgresql://:%s/?service=%s&port=%s", wrongPort, testService2, TestUtil.getPort()), info);
assertNotNull(con);
con.close();
// testing that properties overriding priority is correct (NEGATIVE cases)
// service=wrong port
try {
con = DriverManager.getConnection(String.format("jdbc:postgresql://?service=%s", testService2));
fail("Expected an SQLException because port is out of range");
} catch (SQLException e) {
// Expected exception.
}
// service=correct port; Properties=wrong port
info.setProperty("PGPORT", wrongPort);
try {
con = DriverManager.getConnection(String.format("jdbc:postgresql://?service=%s", testService1), info);
fail("Expected an SQLException because port is out of range");
} catch (SQLException e) {
// Expected exception.
}
// service=correct port; Properties=correct port; URL port=wrong
info.setProperty("PGPORT", String.valueOf(TestUtil.getPort()));
try {
con = DriverManager.getConnection(String.format("jdbc:postgresql://:%s/?service=%s", wrongPort, testService1), info);
fail("Expected an SQLException because port is out of range");
} catch (SQLException e) {
// Expected exception.
}
// service=correct port; Properties=correct port; URL port=correct; URL argument=wrong port
try {
con = DriverManager.getConnection(String.format("jdbc:postgresql://:%s/?service=%s&port=%s", TestUtil.getPort(), testService1, wrongPort), info);
fail("Expected an SQLException because port is out of range");
} catch (SQLException e) {
// Expected exception.
}
});
} finally {
// cleanup
Files.delete(tempFile);
Files.delete(tempDirWithPrefix);
}
}
/**
* Tests the password by connecting to the test database.
* password from .pgpass (correct)
*/
@Test
public void testConnectPassword01() throws Exception {
TestUtil.initDriver(); // Set up log levels, etc.
// Create temporary .pgpass file
Path tempDirWithPrefix = Files.createTempDirectory("junit");
Path tempPgPassFile = Files.createTempFile(tempDirWithPrefix, "pgpass", "conf");
try {
try (PrintStream psPass = new PrintStream(Files.newOutputStream(tempPgPassFile))) {
psPass.printf("%s:%s:%s:%s:%s%n", TestUtil.getServer(), TestUtil.getPort(), TestUtil.getDatabase(), TestUtil.getUser(), TestUtil.getPassword());
}
// ignore pg_service.conf, use .pgpass
Resources.with(
new EnvironmentVariables(PGEnvironment.PGSERVICEFILE.getName(), "", PGEnvironment.PGSYSCONFDIR.getName(), ""),
new SystemProperties(PGEnvironment.ORG_POSTGRESQL_PGSERVICEFILE.getName(), "", "user.home", "/tmp/dir-non-existent",
PGEnvironment.ORG_POSTGRESQL_PGPASSFILE.getName(), tempPgPassFile.toString())
).execute(() -> {
// password from .pgpass (correct)
Connection con = DriverManager.getConnection(String.format("jdbc:postgresql://%s:%s/%s?user=%s", TestUtil.getServer(), TestUtil.getPort(), TestUtil.getDatabase(), TestUtil.getUser()));
assertNotNull(con);
con.close();
});
} finally {
// cleanup
Files.delete(tempPgPassFile);
Files.delete(tempDirWithPrefix);
}
}
/**
* Tests the password by connecting to the test database.
* password from service (correct) and .pgpass (wrong)
*/
@Test
public void testConnectPassword02() throws Exception {
TestUtil.initDriver(); // Set up log levels, etc.
String wrongPassword = "random wrong";
// Create temporary pg_service.conf and .pgpass file
Path tempDirWithPrefix = Files.createTempDirectory("junit");
Path tempPgServiceFile = Files.createTempFile(tempDirWithPrefix, "pg_service", "conf");
Path tempPgPassFile = Files.createTempFile(tempDirWithPrefix, "pgpass", "conf");
try {
// Write service section
String testService1 = "testService1";
try (PrintStream psService = new PrintStream(Files.newOutputStream(tempPgServiceFile));
PrintStream psPass = new PrintStream(Files.newOutputStream(tempPgPassFile))) {
psService.printf("[%s]%nhost=%s%nport=%s%ndbname=%s%nuser=%s%npassword=%s%n", testService1, TestUtil.getServer(), TestUtil.getPort(), TestUtil.getDatabase(), TestUtil.getUser(), TestUtil.getPassword());
psPass.printf("%s:%s:%s:%s:%s%n", TestUtil.getServer(), TestUtil.getPort(), TestUtil.getDatabase(), TestUtil.getUser(), wrongPassword);
}
// ignore pg_service.conf, use .pgpass
Resources.with(
new SystemProperties(PGEnvironment.ORG_POSTGRESQL_PGSERVICEFILE.getName(), tempPgServiceFile.toString(), PGEnvironment.ORG_POSTGRESQL_PGPASSFILE.getName(), tempPgPassFile.toString())
).execute(() -> {
// password from service (correct) and .pgpass (wrong)
Connection con = DriverManager.getConnection(String.format("jdbc:postgresql://?service=%s", testService1));
assertNotNull(con);
con.close();
});
} finally {
// cleanup
Files.delete(tempPgPassFile);
Files.delete(tempPgServiceFile);
Files.delete(tempDirWithPrefix);
}
}
/**
* Tests the password by connecting to the test database.
* password from java property (correct) and service (wrong) and .pgpass (wrong)
*/
@Test
public void testConnectPassword03() throws Exception {
TestUtil.initDriver(); // Set up log levels, etc.
String wrongPassword = "random wrong";
// Create temporary pg_service.conf and .pgpass file
Path tempDirWithPrefix = Files.createTempDirectory("junit");
Path tempPgServiceFile = Files.createTempFile(tempDirWithPrefix, "pg_service", "conf");
Path tempPgPassFile = Files.createTempFile(tempDirWithPrefix, "pgpass", "conf");
try {
// Write service section
String testService1 = "testService1";
try (PrintStream psService = new PrintStream(Files.newOutputStream(tempPgServiceFile));
PrintStream psPass = new PrintStream(Files.newOutputStream(tempPgPassFile))) {
psService.printf("[%s]%nhost=%s%nport=%s%ndbname=%s%nuser=%s%npassword=%s%n", testService1, TestUtil.getServer(), TestUtil.getPort(), TestUtil.getDatabase(), TestUtil.getUser(), wrongPassword);
psPass.printf("%s:%s:%s:%s:%s%n", TestUtil.getServer(), TestUtil.getPort(), TestUtil.getDatabase(), TestUtil.getUser(), wrongPassword);
}
// point the driver at the temporary pg_service.conf and .pgpass via system properties
Resources.with(
new SystemProperties(PGEnvironment.ORG_POSTGRESQL_PGSERVICEFILE.getName(), tempPgServiceFile.toString(), PGEnvironment.ORG_POSTGRESQL_PGPASSFILE.getName(), tempPgPassFile.toString())
).execute(() -> {
// password from java property (correct) and service (wrong) and .pgpass (wrong)
Properties info = new Properties();
PGProperty.PASSWORD.set(info, TestUtil.getPassword());
Connection con = DriverManager.getConnection(String.format("jdbc:postgresql://?service=%s", testService1), info);
assertNotNull(con);
con.close();
});
} finally {
// cleanup
Files.delete(tempPgPassFile);
Files.delete(tempPgServiceFile);
Files.delete(tempDirWithPrefix);
}
}
/**
* Tests the password by connecting to the test database.
* password from URL parameter (correct) and java property (wrong) and service (wrong) and .pgpass (wrong)
*/
@Test
public void testConnectPassword04() throws Exception {
TestUtil.initDriver(); // Set up log levels, etc.
String wrongPassword = "random wrong";
// Create temporary pg_service.conf and .pgpass file
Path tempDirWithPrefix = Files.createTempDirectory("junit");
Path tempPgServiceFile = Files.createTempFile(tempDirWithPrefix, "pg_service", "conf");
Path tempPgPassFile = Files.createTempFile(tempDirWithPrefix, "pgpass", "conf");
try {
// Write service section
String testService1 = "testService1";
try (PrintStream psService = new PrintStream(Files.newOutputStream(tempPgServiceFile));
PrintStream psPass = new PrintStream(Files.newOutputStream(tempPgPassFile))) {
psService.printf("[%s]%nhost=%s%nport=%s%ndbname=%s%nuser=%s%npassword=%s%n", testService1, TestUtil.getServer(), TestUtil.getPort(), TestUtil.getDatabase(), TestUtil.getUser(), wrongPassword);
psPass.printf("%s:%s:%s:%s:%s%n", TestUtil.getServer(), TestUtil.getPort(), TestUtil.getDatabase(), TestUtil.getUser(), wrongPassword);
}
// point the driver at the temporary pg_service.conf and .pgpass via system properties
Resources.with(
new SystemProperties(PGEnvironment.ORG_POSTGRESQL_PGSERVICEFILE.getName(), tempPgServiceFile.toString(), PGEnvironment.ORG_POSTGRESQL_PGPASSFILE.getName(), tempPgPassFile.toString())
).execute(() -> {
Properties info = new Properties();
PGProperty.PASSWORD.set(info, wrongPassword);
Connection con = DriverManager.getConnection(String.format("jdbc:postgresql://?service=%s&password=%s", testService1, TestUtil.getPassword()), info);
assertNotNull(con);
con.close();
});
} finally {
// cleanup
Files.delete(tempPgPassFile);
Files.delete(tempPgServiceFile);
Files.delete(tempDirWithPrefix);
}
}
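// Taken together, testConnectPassword01-04 exercise the password lookup precedence the driver
// is expected to honour: URL parameter over Properties object over pg_service.conf over .pgpass.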
/**
* Tests that pgjdbc performs connection failover if unable to connect to the first host in the
* URL.
*
* @throws Exception if something wrong happens
*/
@Test
public void testConnectFailover() throws Exception {
String url = "jdbc:postgresql://invalidhost.not.here," + TestUtil.getServer() + ":"
+ TestUtil.getPort() + "/" + TestUtil.getDatabase() + "?connectTimeout=5";
Connection con = DriverManager.getConnection(url, TestUtil.getUser(), TestUtil.getPassword());
assertNotNull(con);
con.close();
}
/*
* Test that the readOnly property works.
*/
@Test
public void testReadOnly() throws Exception {
TestUtil.initDriver(); // Set up log levels, etc.
Connection con = DriverManager.getConnection(TestUtil.getURL() + "&readOnly=true",
TestUtil.getUser(), TestUtil.getPassword());
assertNotNull(con);
assertTrue(con.isReadOnly());
con.close();
con = DriverManager.getConnection(TestUtil.getURL() + "&readOnly=false", TestUtil.getUser(),
TestUtil.getPassword());
assertNotNull(con);
assertFalse(con.isReadOnly());
con.close();
con =
DriverManager.getConnection(TestUtil.getURL(), TestUtil.getUser(), TestUtil.getPassword());
assertNotNull(con);
assertFalse(con.isReadOnly());
con.close();
}
@Test
public void testRegistration() throws Exception {
TestUtil.initDriver();
// Driver should already be registered: registration happens automatically when the class is loaded
assertTrue(org.postgresql.Driver.isRegistered());
ArrayList<java.sql.Driver> drivers = Collections.list(DriverManager.getDrivers());
searchInstanceOf: {
for (java.sql.Driver driver : drivers) {
if (driver instanceof org.postgresql.Driver) {
break searchInstanceOf;
}
}
fail("Driver has not been found in DriverManager's list but it should be registered");
}
// Deregister the driver
Driver.deregister();
assertFalse(Driver.isRegistered());
drivers = Collections.list(DriverManager.getDrivers());
for (java.sql.Driver driver : drivers) {
if (driver instanceof org.postgresql.Driver) {
fail("Driver should be deregistered but it is still present in DriverManager's list");
}
}
// register the driver again
Driver.register();
assertTrue(Driver.isRegistered());
drivers = Collections.list(DriverManager.getDrivers());
for (java.sql.Driver driver : drivers) {
if (driver instanceof org.postgresql.Driver) {
return;
}
}
fail("Driver has not been found in DriverManager's list but it should be registered");
}
@Test
public void testSystemErrIsNotClosedWhenCreatedMultipleConnections() throws Exception {
TestUtil.initDriver();
PrintStream err = System.err;
String loggerLevel = System.getProperty("loggerLevel");
String loggerFile = System.getProperty("loggerFile");
System.clearProperty("loggerLevel");
System.clearProperty("loggerFile");
System.setProperty("loggerLevel", "INFO");
PrintStream buffer = new PrintStream(new ByteArrayOutputStream());
System.setErr(buffer);
try {
Connection con = DriverManager.getConnection(TestUtil.getURL(), TestUtil.getUser(), TestUtil.getPassword());
try {
assertNotNull(con);
} finally {
con.close();
}
con = DriverManager.getConnection(TestUtil.getURL(), TestUtil.getUser(), TestUtil.getPassword());
try {
assertNotNull(con);
System.err.println();
assertFalse(System.err.checkError(), "The System.err should not be closed.");
} finally {
con.close();
}
} finally {
System.setProperty("loggerLevel", loggerLevel);
System.setProperty("loggerFile", loggerFile);
System.setErr(err);
}
}
private void setProperty(String key, String value) {
if (value == null) {
System.clearProperty(key);
} else {
System.setProperty(key, value);
}
}
}
|
package io.rapidpro.flows.utils;
import com.google.gson.*;
import com.google.gson.stream.JsonReader;
import com.google.gson.stream.JsonWriter;
import io.rapidpro.flows.definition.Flow;
import io.rapidpro.flows.definition.GroupRef;
import io.rapidpro.flows.definition.LabelRef;
import io.rapidpro.flows.definition.actions.Action;
import org.threeten.bp.Instant;
import org.threeten.bp.LocalDateTime;
import org.threeten.bp.ZoneId;
import org.threeten.bp.ZoneOffset;
import org.threeten.bp.format.DateTimeFormatter;
import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
/**
* JSON utility methods
*/
public class JsonUtils {
protected static GsonBuilder s_gsonBuilder = new GsonBuilder()
.registerTypeAdapter(Flow.class, new Flow.Deserializer())
.registerTypeAdapter(Action.class, new Action.Serializer())
.registerTypeAdapter(Action.class, new Action.Deserializer())
.registerTypeAdapter(LabelRef.class, new LabelRef.Serializer())
.registerTypeAdapter(LabelRef.class, new LabelRef.Deserializer())
.registerTypeAdapter(GroupRef.class, new GroupRef.Serializer())
.registerTypeAdapter(GroupRef.class, new GroupRef.Deserializer());
protected static Gson s_gson = s_gsonBuilder.create();
protected static ThreadLocal<Flow.DeserializationContext> s_deserializationContext = new ThreadLocal<>();
public static GsonBuilder getGsonBuilder() {
return s_gsonBuilder;
}
public static Gson getGson() {
return s_gson;
}
/**
* Gets the named member as a string, returning null if it's null or it doesn't exist
* @param obj the parsed JSON object
* @param memberName the object member name
* @return the string value or null
*/
public static String getAsString(JsonObject obj, String memberName) {
JsonElement member = obj.get(memberName);
return (member == null || member.isJsonNull()) ? null : member.getAsString();
}
/**
* Instantiates a new object instance by calling a static fromJson method on its class.
* @param obj the JSON object passed to fromJson
* @param context the deserialization context
* @param clazz the class to instantiate
* @return the new object instance
*/
public static <T> T fromJson(JsonObject obj, Flow.DeserializationContext context, Class<T> clazz) {
try {
Method method = clazz.getDeclaredMethod("fromJson", JsonObject.class, Flow.DeserializationContext.class);
return (T) method.invoke(null, obj, context);
} catch (NoSuchMethodException | InvocationTargetException | IllegalAccessException e) {
throw new RuntimeException(e);
}
}
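// Illustrative sketch (hypothetical type, not part of this codebase): any class passed to
// fromJson() only needs to expose a static factory with this exact signature, e.g.
//
//   public class ExampleRef {
//       public static ExampleRef fromJson(JsonObject obj, Flow.DeserializationContext ctx) {
//           return new ExampleRef(JsonUtils.getAsString(obj, "name"));
//       }
//   }
//
// JsonUtils.fromJson(obj, ctx, ExampleRef.class) then locates and invokes that method reflectively.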
/**
* Adapter for ZoneId instances to serialize as id string, e.g. "Africa/Kigali"
*/
public static class TimezoneAdapter extends TypeAdapter<ZoneId> {
@Override
public void write(JsonWriter out, ZoneId zoneId) throws IOException {
out.value(zoneId.getId());
}
@Override
public ZoneId read(JsonReader in) throws IOException {
return ZoneId.of(in.nextString());
}
}
/**
* Adapter for Instant instances to serialize as ISO8601 in UTC, with millisecond precision,
* e.g. "2014-10-03T01:41:12.790Z"
*/
public static class InstantAdapter extends TypeAdapter<Instant> {
protected static DateTimeFormatter s_formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");
@Override
public void write(JsonWriter out, Instant instant) throws IOException {
if (instant != null) {
out.value(s_formatter.format(instant.atOffset(ZoneOffset.UTC)));
} else {
out.nullValue();
}
}
@Override
public Instant read(JsonReader in) throws IOException {
return LocalDateTime.parse(in.nextString(), s_formatter).atOffset(ZoneOffset.UTC).toInstant();
}
}
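// Example registration (a sketch; this wiring is not done by the class itself): both adapters
// are meant to be attached to a GsonBuilder so ZoneId and Instant round-trip as plain strings:
//
//   Gson gson = JsonUtils.getGsonBuilder()
//       .registerTypeAdapter(ZoneId.class, new JsonUtils.TimezoneAdapter())
//       .registerTypeAdapter(Instant.class, new JsonUtils.InstantAdapter())
//       .create();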
public static Flow.DeserializationContext getDeserializationContext() {
return s_deserializationContext.get();
}
public static void setDeserializationContext(Flow.DeserializationContext context) {
s_deserializationContext.set(context);
}
public static void clearDeserializationContext() {
s_deserializationContext.remove();
}
}
|
package fr.adrienbrault.idea.symfony2plugin.stubs.indexes;
import com.intellij.ide.highlighter.XmlFileType;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.PsiFile;
import com.intellij.psi.xml.XmlFile;
import com.intellij.util.indexing.*;
import com.intellij.util.io.DataExternalizer;
import com.intellij.util.io.EnumeratorStringDescriptor;
import com.intellij.util.io.KeyDescriptor;
import fr.adrienbrault.idea.symfony2plugin.Symfony2ProjectComponent;
import fr.adrienbrault.idea.symfony2plugin.config.xml.XmlHelper;
import fr.adrienbrault.idea.symfony2plugin.util.yaml.YamlHelper;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.yaml.YAMLFileType;
import org.jetbrains.yaml.psi.YAMLFile;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
public class ContainerParameterStubIndex extends FileBasedIndexExtension<String, String> {
public static final ID<String, String> KEY = ID.create("fr.adrienbrault.idea.symfony2plugin.parameter2");
private final KeyDescriptor<String> myKeyDescriptor = new EnumeratorStringDescriptor();
@NotNull
@Override
public ID<String, String> getName() {
return KEY;
}
@NotNull
@Override
public DataIndexer<String, String, FileContent> getIndexer() {
return new DataIndexer<String, String, FileContent>() {
@NotNull
@Override
public Map<String, String> map(FileContent inputData) {
Map<String, String> map = new HashMap<String, String>();
PsiFile psiFile = inputData.getPsiFile();
if(!Symfony2ProjectComponent.isEnabled(psiFile.getProject())) {
return map;
}
if(!ServicesDefinitionStubIndex.isValidForIndex(inputData, psiFile)) {
return map;
}
if(psiFile instanceof YAMLFile) {
attachTHashMapNullable(YamlHelper.getLocalParameterMap(psiFile), map);
}
if(psiFile instanceof XmlFile) {
attachTHashMapNullable(XmlHelper.getFileParameterMap((XmlFile) psiFile), map);
}
return map;
}
};
}
/**
* workaround for nullable keys #238
*/
private void attachTHashMapNullable(Map<String, String> source, Map<String, String> tHashMap) {
for(Map.Entry<String, String> entry: source.entrySet()) {
// we can remove empty key check now? error is in "value" #238, #277
String key = entry.getKey();
if(key != null) {
// we are not allowed to save null values;
// but we can have them so provide empty value then
String value = entry.getValue();
if(value == null) value = "";
tHashMap.put(key, value);
}
}
}
@Override
public KeyDescriptor<String> getKeyDescriptor() {
return this.myKeyDescriptor;
}
@Override
public DataExternalizer<String> getValueExternalizer() {
return StringDataExternalizer.STRING_DATA_EXTERNALIZER;
}
@Override
public FileBasedIndex.InputFilter getInputFilter() {
return new FileBasedIndex.InputFilter() {
@Override
public boolean acceptInput(VirtualFile file) {
return file.getFileType() == XmlFileType.INSTANCE || file.getFileType() == YAMLFileType.YML;
}
};
}
@Override
public boolean dependsOnFileContent() {
return true;
}
@Override
public int getVersion() {
return 1;
}
private static class StringDataExternalizer implements DataExternalizer<String> {
public static final StringDataExternalizer STRING_DATA_EXTERNALIZER = new StringDataExternalizer();
private final EnumeratorStringDescriptor myStringEnumerator = new EnumeratorStringDescriptor();
@Override
public void save(DataOutput out, String value) throws IOException {
if(value == null) {
value = "";
}
this.myStringEnumerator.save(out, value);
}
@Override
public String read(DataInput in) throws IOException {
String value = this.myStringEnumerator.read(in);
// EnumeratorStringDescriptor writes out "null" as string, so workaround here
if("null".equals(value)) {
value = "";
}
// it looks like this is our "null keys not supported" #238, #277
// so don't force null values here
return value;
}
}
}
|
package com.akiban.server.test.it.dxl;
import static junit.framework.Assert.assertEquals;
import static junit.framework.Assert.assertNotNull;
import static junit.framework.Assert.assertSame;
import static junit.framework.Assert.assertTrue;
import static org.junit.Assert.assertNull;
import java.util.Arrays;
import java.util.List;
import com.akiban.ais.model.AkibanInformationSchema;
import com.akiban.ais.model.TableName;
import com.akiban.qp.operator.StoreAdapter;
import com.akiban.qp.row.RowBase;
import com.akiban.qp.rowtype.RowType;
import com.akiban.qp.rowtype.Schema;
import com.akiban.qp.util.SchemaCache;
import com.akiban.server.rowdata.RowData;
import com.akiban.server.test.it.ITBase;
import com.akiban.server.test.it.qp.TestRow;
import org.junit.Test;
import com.akiban.ais.model.Column;
import com.akiban.ais.model.Group;
import com.akiban.ais.model.PrimaryKey;
import com.akiban.ais.model.UserTable;
import com.akiban.server.api.dml.scan.NewRow;
import com.akiban.server.api.dml.scan.NewRowBuilder;
import com.akiban.server.error.InvalidOperationException;
import com.akiban.server.error.UnsupportedDropException;
public final class COIBasicIT extends ITBase {
private static class TableIds {
public final int c;
public final int o;
public final int i;
private TableIds(int c, int o, int i) {
this.c = c;
this.o = o;
this.i = i;
}
}
private TableIds createTables() throws InvalidOperationException {
int cId = createTable("coi", "c", "cid int not null primary key, name varchar(32)");
int oId = createTable("coi", "o", "oid int not null primary key, c_id int, GROUPING FOREIGN KEY (c_id) REFERENCES c(cid)");
createIndex("coi", "o", "__akiban_fk_o", "c_id");
int iId = createTable("coi", "i", "iid int not null primary key, o_id int, idesc varchar(32), GROUPING FOREIGN KEY (o_id) REFERENCES o(oid)");
createIndex("coi", "i", "__akiban_fk_i", "o_id");
AkibanInformationSchema ais = ddl().getAIS(session());
// Lots of checking, the more the merrier
final UserTable cTable = ais.getUserTable( ddl().getTableName(session(), cId) );
{
assertEquals("c.columns.size()", 2, cTable.getColumns().size());
assertEquals("c.indexes.size()", 1, cTable.getIndexes().size());
PrimaryKey pk = cTable.getPrimaryKey();
List<Column> expectedPkCols = Arrays.asList( cTable.getColumn("cid") );
assertEquals("pk cols", expectedPkCols, pk.getColumns());
assertSame("pk index", cTable.getIndex("PRIMARY"), pk.getIndex());
assertEquals("pk index cols size", 1, pk.getIndex().getKeyColumns().size());
assertEquals("parent join", null, cTable.getParentJoin());
assertEquals("child joins.size", 1, cTable.getChildJoins().size());
}
final UserTable oTable = ais.getUserTable( ddl().getTableName(session(), oId) );
{
assertEquals("c.columns.size()", 2, oTable.getColumns().size());
assertEquals("c.indexes.size()", 2, oTable.getIndexes().size());
PrimaryKey pk = oTable.getPrimaryKey();
List<Column> expectedPkCols = Arrays.asList( oTable.getColumn("oid") );
assertEquals("pk cols", expectedPkCols, pk.getColumns());
assertSame("pk index", oTable.getIndex("PRIMARY"), pk.getIndex());
assertEquals("pk index cols size", 1, pk.getIndex().getKeyColumns().size());
assertNotNull("parent join is null", oTable.getParentJoin());
assertSame("parent join", cTable.getChildJoins().get(0), oTable.getParentJoin());
assertEquals("child joins.size", 1, oTable.getChildJoins().size());
}
final UserTable iTable = ais.getUserTable( ddl().getTableName(session(), iId) );
{
assertEquals("c.columns.size()", 3, iTable.getColumns().size());
assertEquals("c.indexes.size()", 2, iTable.getIndexes().size());
PrimaryKey pk = iTable.getPrimaryKey();
List<Column> expectedPkCols = Arrays.asList( iTable.getColumn("iid") );
assertEquals("pk cols", expectedPkCols, pk.getColumns());
assertSame("pk index", iTable.getIndex("PRIMARY"), pk.getIndex());
assertEquals("pk index cols size", 1, pk.getIndex().getKeyColumns().size());
assertNotNull("parent join is null", iTable.getParentJoin());
assertSame("parent join", oTable.getChildJoins().get(0), iTable.getParentJoin());
assertEquals("child joins.size", 0, iTable.getChildJoins().size());
}
{
Group group = cTable.getGroup();
assertSame("o's group", group, oTable.getGroup());
assertSame("i's group", group, iTable.getGroup());
}
return new TableIds(cId, oId, iId);
}
@Test
public void simple() throws InvalidOperationException {
createTables(); // TODO placeholder test method until we get the insertToUTablesAndScan to work
}
@Test
public void insertToUTablesAndScan() throws InvalidOperationException {
final TableIds tids = createTables();
final NewRow cRow = NewRowBuilder.forTable(tids.c, getRowDef(tids.c)).put(1L).put("Robert").check(session(), dml()).row();
final NewRow oRow = NewRowBuilder.forTable(tids.o, getRowDef(tids.o)).put(10L).put(1L).check(session(), dml()).row();
final NewRow iRow = NewRowBuilder.forTable(tids.i, getRowDef(tids.i)).put(100L).put(10L).put("Desc 1").check(session(), dml()).row();
writeRows(cRow, oRow, iRow);
expectFullRows(tids.c, NewRowBuilder.copyOf(cRow).row());
expectFullRows(tids.o, NewRowBuilder.copyOf(oRow).row());
expectFullRows(tids.i, NewRowBuilder.copyOf(iRow).row());
}
@Test
public void insertToUTablesAndScanToLegacy() throws InvalidOperationException {
final TableIds tids = createTables();
final NewRow cRow = NewRowBuilder.forTable(tids.c, getRowDef(tids.c)).put(1L).put("Robert").check(session(), dml()).row();
final NewRow oRow = NewRowBuilder.forTable(tids.o, getRowDef(tids.o)).put(10L).put(1L).check(session(), dml()).row();
final NewRow iRow = NewRowBuilder.forTable(tids.i, getRowDef(tids.i)).put(100L).put(10L).put("Desc 1").check(session(), dml()).row();
writeRows(cRow, oRow, iRow);
List<RowData> cRows = scanFull(scanAllRequest(tids.c));
List<RowData> oRows = scanFull(scanAllRequest(tids.o));
List<RowData> iRows = scanFull(scanAllRequest(tids.i));
assertEquals("cRows", Arrays.asList(cRow), convertRowDatas(cRows));
assertEquals("oRows", Arrays.asList(oRow), convertRowDatas(oRows));
assertEquals("iRows", Arrays.asList(iRow), convertRowDatas(iRows));
}
@Test
public void insertToUTablesBulkAndScanToLegacy() throws InvalidOperationException {
final TableIds tids = createTables();
final NewRow cRow = NewRowBuilder.forTable(tids.c, getRowDef(tids.c)).put(1L).put("Robert").check(session(), dml()).row();
final NewRow oRow = NewRowBuilder.forTable(tids.o, getRowDef(tids.o)).put(10L).put(1L).check(session(), dml()).row();
final NewRow iRow = NewRowBuilder.forTable(tids.i, getRowDef(tids.i)).put(100L).put(10L).put("Desc 1").check(session(), dml()).row();
dml().writeRows(session(), Arrays.asList(cRow.toRowData(), oRow.toRowData(), iRow.toRowData()));
List<RowData> cRows = scanFull(scanAllRequest(tids.c));
List<RowData> oRows = scanFull(scanAllRequest(tids.o));
List<RowData> iRows = scanFull(scanAllRequest(tids.i));
assertEquals("cRows", Arrays.asList(cRow), convertRowDatas(cRows));
assertEquals("oRows", Arrays.asList(oRow), convertRowDatas(oRows));
assertEquals("iRows", Arrays.asList(iRow), convertRowDatas(iRows));
}
@Test(expected=UnsupportedDropException.class)
public void dropTableRoot() throws InvalidOperationException {
final TableIds tids = createTables();
ddl().dropTable(session(), tableName(tids.c));
}
@Test(expected=UnsupportedDropException.class)
public void dropTableMiddle() throws InvalidOperationException {
final TableIds tids = createTables();
ddl().dropTable(session(), tableName(tids.o));
}
@Test
public void dropTableLeaves() throws InvalidOperationException {
final TableIds tids = createTables();
final NewRow cRow = NewRowBuilder.forTable(tids.c, getRowDef(tids.c)).put(1L).put("Robert").check(session(), dml()).row();
final NewRow oRow = NewRowBuilder.forTable(tids.o, getRowDef(tids.o)).put(10L).put(1L).check(session(), dml()).row();
final NewRow iRow = NewRowBuilder.forTable(tids.i, getRowDef(tids.i)).put(100L).put(10L).put("Desc 1").check(session(), dml()).row();
writeRows(cRow, oRow, iRow);
List<RowData> cRows = scanFull(scanAllRequest(tids.c));
List<RowData> oRows = scanFull(scanAllRequest(tids.o));
List<RowData> iRows = scanFull(scanAllRequest(tids.i));
assertEquals("cRows", Arrays.asList(cRow), convertRowDatas(cRows));
assertEquals("oRows", Arrays.asList(oRow), convertRowDatas(oRows));
assertEquals("iRows", Arrays.asList(iRow), convertRowDatas(iRows));
ddl().dropTable(session(), tableName(tids.i));
assertEquals("oRows", Arrays.asList(oRow), convertRowDatas(oRows));
assertEquals("cRows", Arrays.asList(cRow), convertRowDatas(cRows));
ddl().dropTable(session(), tableName(tids.o));
assertEquals("cRows", Arrays.asList(cRow), convertRowDatas(cRows));
ddl().dropTable(session(), tableName(tids.c));
}
@Test
public void dropAllTablesHelper() throws InvalidOperationException {
createTables();
createTable("test", "parent", "id int not null primary key");
createTable("test", "child", "id int not null primary key, pid int, GROUPING FOREIGN KEY (pid) REFERENCES parent(id)");
dropAllTables();
}
@Test
public void dropGroup() throws InvalidOperationException {
final TableIds tids = createTables();
final TableName groupName = ddl().getAIS(session()).getUserTable(tableName(tids.i)).getGroup().getName();
ddl().dropGroup(session(), groupName);
AkibanInformationSchema ais = ddl().getAIS(session());
assertNull("expected no table", ais.getUserTable("coi", "c"));
assertNull("expected no table", ais.getUserTable("coi", "o"));
assertNull("expected no table", ais.getUserTable("coi", "i"));
assertNull("expected no group", ais.getGroup(groupName));
}
@Test
public void hKeyChangePropagation() {
final TableIds tids = createTables();
Schema schema = SchemaCache.globalSchema(ddl().getAIS(session()));
RowType cType = schema.userTableRowType(getUserTable(tids.c));
RowType oType = schema.userTableRowType(getUserTable(tids.o));
RowType iType = schema.userTableRowType(getUserTable(tids.i));
StoreAdapter adapter = newStoreAdapter(schema);
Object[] cCols = { 1, "c1" };
Object[] oCols = { 10, 1 };
Object[] o2Cols = { 20, 2 };
Object[] iCols = { 100, 10, "i100" };
TestRow cRow = new TestRow(cType, cCols);
TestRow oRow = new TestRow(oType, oCols);
TestRow o2Row = new TestRow(oType, o2Cols);
TestRow iRow = new TestRow(iType, iCols);
// Unrelated o row, to demonstrate i ordering/adoption
writeRow(tids.o, o2Cols);
compareRows( new RowBase[] { o2Row }, adapter.newGroupCursor(cType.userTable().getGroup()) );
// i is first due to null cid component
writeRow(tids.i, iCols);
compareRows( new RowBase[] { iRow, o2Row }, adapter.newGroupCursor(cType.userTable().getGroup()) );
// i should get adopted by the new o, filling in its cid component
writeRow(tids.o, oCols);
compareRows( new RowBase[] { oRow, iRow, o2Row }, adapter.newGroupCursor(cType.userTable().getGroup()) );
writeRow(tids.c, cCols);
compareRows( new RowBase[] { cRow, oRow, iRow, o2Row }, adapter.newGroupCursor(cType.userTable().getGroup()) );
}
}
|
package com.eqot.fontawesome;
import android.content.Context;
import android.content.res.Resources;
import android.graphics.Typeface;
import android.text.SpannableStringBuilder;
import android.text.Spanned;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public class FontAwesome {
private static final String FONT_FILENAME = "fontawesome-webfont.ttf";
private static Typeface sTypeface = null;
private static final Pattern pattern = Pattern.compile("([^\\{]*)\\{([\\w\\-]+)\\}(.*)");
public static void applyToAllViews(Context context, View view) {
apply(context, view);
if (!(view instanceof ViewGroup)) {
return;
}
ViewGroup viewGroup = (ViewGroup) view;
for (int i = 0, l = viewGroup.getChildCount(); i < l; i++) {
final View child = viewGroup.getChildAt(i);
applyToAllViews(context, child);
}
}
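// Typical call site (sketch; R.id.root is a placeholder id): after inflating a layout whose
// TextViews contain markers such as "Like {fa-thumbs-up}", walk the view tree once:
//
//   FontAwesome.applyToAllViews(this, findViewById(R.id.root));
//
// apply() replaces each {code} marker with the glyph looked up from the bundled font's string resources.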
public static void apply(Context context, View view) {
if (sTypeface == null) {
sTypeface = Typeface.createFromAsset(context.getAssets(), FONT_FILENAME);
}
if (!(view instanceof TextView)) {
return;
}
final TextView textView = (TextView) view;
final CharSequence text = textView.getText();
final SpannableStringBuilder convertedText = convertText(context, text);
textView.setText(convertedText);
textView.setAllCaps(false);
}
private static SpannableStringBuilder convertText(Context context, CharSequence text) {
final SpannableStringBuilder sb = new SpannableStringBuilder();
while (true) {
final Matcher matcher = pattern.matcher(text);
if (!matcher.find()) {
sb.append(text);
break;
}
sb.append(matcher.group(1));
final String character = getCharacterFromCode(context, matcher.group(2));
final CustomTypefaceSpan typefaceSpan = new CustomTypefaceSpan("", sTypeface);
sb.append(character, typefaceSpan, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
text = matcher.group(3);
}
return sb;
}
private static String getCharacterFromCode(Context context, String code) {
code = code.replace("-", "_");
final Resources resources = context.getResources();
final int id = resources.getIdentifier(code, "string", context.getPackageName());
final String character = resources.getString(id);
return character;
}
}
|
package edu.umd.cs.findbugs;
import java.io.IOException;
import org.apache.bcel.Constants;
import org.apache.bcel.classfile.JavaClass;
import edu.umd.cs.findbugs.ba.AnalysisContext;
import edu.umd.cs.findbugs.ba.Hierarchy;
import edu.umd.cs.findbugs.ba.JavaClassAndMethod;
import edu.umd.cs.findbugs.ba.SignatureConverter;
import edu.umd.cs.findbugs.ba.SourceInfoMap;
import edu.umd.cs.findbugs.ba.XFactory;
import edu.umd.cs.findbugs.ba.XMethod;
import edu.umd.cs.findbugs.visitclass.DismantleBytecode;
import edu.umd.cs.findbugs.visitclass.PreorderVisitor;
import edu.umd.cs.findbugs.xml.XMLAttributeList;
import edu.umd.cs.findbugs.xml.XMLOutput;
/**
* A BugAnnotation specifying a particular method in a particular class.
* A MethodAnnotation may (optionally) have a SourceLineAnnotation directly
* embedded inside it to indicate the range of source lines where the
* method is defined.
*
* @author David Hovemeyer
* @see BugAnnotation
*/
public class MethodAnnotation extends PackageMemberAnnotation {
private static final long serialVersionUID = 1L;
private static final boolean UGLY_METHODS = Boolean.getBoolean("ma.ugly");
private static final String DEFAULT_ROLE = "METHOD_DEFAULT";
private String methodName;
private String methodSig;
private String fullMethod;
private boolean isStatic;
/**
* Constructor.
*
* @param className the name of the class containing the method
* @param methodName the name of the method
* @param methodSig the Java type signature of the method
* @param isStatic true if the method is static, false if not
*/
public MethodAnnotation(String className, String methodName, String methodSig, boolean isStatic) {
super(className, DEFAULT_ROLE);
this.methodName = methodName;
this.methodSig = methodSig;
this.isStatic = isStatic;
fullMethod = null;
sourceLines = null;
}
/**
* Factory method to create a MethodAnnotation from the method the
* given visitor is currently visiting.
*
* @param visitor the BetterVisitor currently visiting the method
*/
public static MethodAnnotation fromVisitedMethod(PreorderVisitor visitor) {
String className = visitor.getDottedClassName();
MethodAnnotation result = new MethodAnnotation(
className,
visitor.getMethodName(),
visitor.getMethodSig(),
visitor.getMethod().isStatic());
// Try to find the source lines for the method
SourceLineAnnotation srcLines = SourceLineAnnotation.fromVisitedMethod(visitor);
result.setSourceLines(srcLines);
return result;
}
/**
* Factory method to create a MethodAnnotation from a method
* called by the instruction the given visitor is currently visiting.
*
* @param visitor the visitor
* @return the MethodAnnotation representing the called method
*/
public static MethodAnnotation fromCalledMethod(DismantleBytecode visitor) {
String className = visitor.getDottedClassConstantOperand();
String methodName = visitor.getNameConstantOperand();
String methodSig = visitor.getDottedSigConstantOperand();
return fromCalledMethod(className, methodName, methodSig,
visitor.getOpcode() == Constants.INVOKESTATIC);
}
public static MethodAnnotation fromForeignMethod(
String className, String methodName, String methodSig, boolean isStatic) {
// Create MethodAnnotation.
// It won't have source lines yet.
MethodAnnotation methodAnnotation =
new MethodAnnotation(className, methodName, methodSig, isStatic);
// Try to find source lines by looking up the exact class and method.
SourceLineAnnotation sourceLines = null;
try {
JavaClass targetClass = AnalysisContext.currentAnalysisContext()
.lookupClass(className);
JavaClassAndMethod targetMethod = Hierarchy.findMethod(targetClass, methodName, methodSig);
if (targetMethod != null) {
sourceLines = SourceLineAnnotation.forEntireMethod(
targetMethod.getJavaClass(), targetMethod.getMethod());
}
} catch (ClassNotFoundException e) {
// Can't find the class
}
// Try consulting the SourceInfoMap
if (sourceLines == null) {
SourceInfoMap.SourceLineRange range = AnalysisContext.currentAnalysisContext()
.getSourceInfoMap()
.getMethodLine(className, methodName, methodSig);
if (range != null) {
sourceLines = new SourceLineAnnotation(
className,
AnalysisContext.currentAnalysisContext().lookupSourceFile(className),
range.getStart(),
range.getEnd(),
-1,
-1);
}
}
// If we couldn't find the source lines,
// create an unknown source line annotation referencing
// the class and source file.
if (sourceLines == null) {
sourceLines = SourceLineAnnotation.createUnknown(className);
}
methodAnnotation.setSourceLines(sourceLines);
return methodAnnotation;
}
/**
* Create a MethodAnnotation from a method that is not
* directly accessible. We will use the repository to
* try to find its class in order to populate the information
* as fully as possible.
*
* @param className class containing called method
* @param methodName name of called method
* @param methodSig signature of called method
* @param isStatic true if called method is static
* @return the MethodAnnotation for the called method
*/
public static MethodAnnotation fromCalledMethod(
String className, String methodName, String methodSig, boolean isStatic) {
MethodAnnotation methodAnnotation =
fromForeignMethod(className, methodName, methodSig, isStatic);
methodAnnotation.setDescription("METHOD_CALLED");
return methodAnnotation;
}
/**
* Create a MethodAnnotation from an XMethod.
*
* @param xmethod the XMethod
* @return the MethodAnnotation
*/
public static MethodAnnotation fromXMethod(XMethod xmethod) {
return fromForeignMethod(
xmethod.getClassName(),
xmethod.getName(),
xmethod.getSignature(),
xmethod.isStatic());
}
/**
* Get the method name.
*/
public String getMethodName() {
return methodName;
}
/**
* Get the method type signature.
*/
public String getMethodSignature() {
return methodSig;
}
/**
* Return whether or not the method is static.
*
* @return true if the method is static, false otherwise
*/
public boolean isStatic() {
return isStatic;
}
/**
* Convert to an XMethod.
*
* @return an XMethod specifying the same method as this MethodAnnotation
*/
public XMethod toXMethod() {
return XFactory.createXMethod(className, methodName, methodSig, isStatic);
}
public void accept(BugAnnotationVisitor visitor) {
visitor.visitMethodAnnotation(this);
}
protected String formatPackageMember(String key) {
if (key.equals(""))
return UGLY_METHODS ? getUglyMethod() : getFullMethod();
else if (key.equals("shortMethod"))
return className + "." + methodName + "()";
else
throw new IllegalArgumentException("unknown key " + key);
}
/**
* Get the "full" method name.
* This is a format which looks sort of like a method signature
* that would appear in Java source code.
*/
public String getFullMethod() {
if (fullMethod == null) {
// Convert to "nice" representation
SignatureConverter converter = new SignatureConverter(methodSig);
String pkgName = getPackageName();
StringBuffer args = new StringBuffer();
if (converter.getFirst() != '(')
throw new IllegalStateException("bad method signature " + methodSig);
converter.skip();
while (converter.getFirst() != ')') {
if (args.length() > 0)
args.append(',');
args.append(shorten(pkgName, converter.parseNext()));
}
converter.skip();
// NOTE: we omit the return type.
// It is not needed to disambiguate the method,
// and would just clutter the output.
// Actually, GJ implements covariant return types at the source level,
// so perhaps it really is necessary.
StringBuffer result = new StringBuffer();
result.append(className);
result.append('.');
result.append(methodName);
result.append('(');
result.append(args);
result.append(')');
fullMethod = result.toString();
}
return fullMethod;
}
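// For example, a method "void check(String, int)" declared in com.example.Validator renders
// roughly as "com.example.Validator.check(String,int)": the return type is dropped and type
// names in java.lang or the annotation's own package are shortened by shorten().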
private String getUglyMethod() {
return className + "." + methodName + " : " + methodSig.replace('/', '.');
}
public int hashCode() {
return className.hashCode() + methodName.hashCode() + methodSig.hashCode();
}
public boolean equals(Object o) {
if (!(o instanceof MethodAnnotation))
return false;
MethodAnnotation other = (MethodAnnotation) o;
return className.equals(other.className)
&& methodName.equals(other.methodName)
&& methodSig.equals(other.methodSig);
}
public int compareTo(BugAnnotation o) {
if (!(o instanceof MethodAnnotation)) // BugAnnotations must be Comparable with any type of BugAnnotation
return this.getClass().getName().compareTo(o.getClass().getName());
MethodAnnotation other = (MethodAnnotation) o;
int cmp;
cmp = className.compareTo(other.className);
if (cmp != 0)
return cmp;
cmp = methodName.compareTo(other.methodName);
if (cmp != 0)
return cmp;
return methodSig.compareTo(other.methodSig);
}
private static final String ELEMENT_NAME = "Method";
public void writeXML(XMLOutput xmlOutput) throws IOException {
}
public void writeXML(XMLOutput xmlOutput, boolean addMessages) throws IOException {
XMLAttributeList attributeList = new XMLAttributeList()
.addAttribute("classname", getClassName())
.addAttribute("name", getMethodName())
.addAttribute("signature", getMethodSignature())
.addAttribute("isStatic", String.valueOf(isStatic()));
String role = getDescription();
if (!role.equals(DEFAULT_ROLE))
attributeList.addAttribute("role", role);
if (sourceLines == null && !addMessages) {
xmlOutput.openCloseTag(ELEMENT_NAME, attributeList);
} else {
xmlOutput.openTag(ELEMENT_NAME, attributeList);
if (sourceLines != null) {
sourceLines.writeXML(xmlOutput);
}
if (addMessages) {
xmlOutput.openTag(MESSAGE_TAG);
xmlOutput.writeText(this.toString());
xmlOutput.closeTag(MESSAGE_TAG);
}
xmlOutput.closeTag(ELEMENT_NAME);
}
}
}
// vim:ts=4
|
package com.currencycloud.client;
import co.freeside.betamax.Betamax;
import co.freeside.betamax.MatchRule;
import com.currencycloud.client.model.Conversion;
import com.currencycloud.client.model.Settlement;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import java.math.BigDecimal;
import java.util.Collections;
import java.util.Map;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.*;
public class SettlementsTest extends BetamaxTestSupport {
private CurrencyCloudClient client;
@Before
public void prepareClient() {
client = prepareTestClient(null, null, "6f5f99d1b860fc47e8a186e3dce0d3f9");
}
@Before
@After
public void methodName() { log.debug("-------------------------"); }
@Test
@Betamax(tape = "can_add_conversion", match = {MatchRule.method, MatchRule.uri, MatchRule.body})
public void testCanAddConversionToSettlement() throws Exception {
Conversion conversion = Conversion.create();
conversion.setBuyCurrency("GBP");
conversion.setSellCurrency("USD");
conversion.setFixedSide("buy");
conversion.setAmount(new BigDecimal(1000));
conversion.setReason("mortgage payment");
conversion.setTermAgreement(true);
conversion = client.createConversion(conversion);
assertThat(conversion.getId(), equalTo("24d2ee7f-c7a3-4181-979e-9c58dbace992"));
assertThat(conversion.getSettlementDate(), equalTo(parseDateTime("2015-05-06T14:00:00+00:00")));
assertThat(conversion.getConversionDate(), equalTo(parseDateTime("2015-05-06T00:00:00+00:00")));
assertThat(conversion.getShortReference(), equalTo("20150504-PGRNVJ"));
assertThat(conversion.getCreatorContactId(), equalTo("c4d838e8-1625-44c6-a9fb-39bcb1fe353d"));
assertThat(conversion.getAccountId(), equalTo("8ec3a69b-02d1-4f09-9a6b-6bd54a61b3a8"));
assertThat(conversion.getCurrencyPair(), equalTo("GBPUSD"));
assertThat(conversion.getStatus(), equalTo("awaiting_funds"));
assertThat(conversion.getBuyCurrency(), equalTo("GBP"));
assertThat(conversion.getSellCurrency(), equalTo("USD"));
assertThat(conversion.getClientBuyAmount(), equalTo(new BigDecimal("1000.00")));
assertThat(conversion.getClientSellAmount(), equalTo(new BigDecimal("1511.70")));
assertThat(conversion.getFixedSide(), equalTo("buy"));
assertThat(conversion.getMidMarketRate(), equalTo(new BigDecimal("1.5118")));
assertThat(conversion.getCoreRate(), equalTo(new BigDecimal("1.5117")));
assertThat(conversion.getPartnerRate(), nullValue());
assertThat(conversion.getPartnerStatus(), equalTo("funds_arrived"));
assertThat(conversion.getPartnerBuyAmount(), equalTo(new BigDecimal("0.00")));
assertThat(conversion.getPartnerSellAmount(), equalTo(new BigDecimal("0.00")));
assertThat(conversion.getClientRate(), equalTo(new BigDecimal("1.5117")));
assertThat(conversion.getDepositRequired(), equalTo(false));
assertThat(conversion.getDepositAmount(), equalTo(new BigDecimal("0.00")));
assertThat(conversion.getDepositCurrency(), equalTo(""));
assertThat(conversion.getDepositStatus(), equalTo("not_required"));
assertThat(conversion.getDepositRequiredAt(), nullValue());
assertThat(conversion.getPaymentIds(), empty());
assertThat(conversion.getCreatedAt(), equalTo(parseDateTime("2015-05-04T20:28:29+00:00")));
assertThat(conversion.getUpdatedAt(), equalTo(parseDateTime("2015-05-04T20:28:29+00:00")));
Settlement settlement = client.createSettlement(Settlement.create());
Settlement updatedSettlement = client.addConversion(settlement.getId(), conversion.getId());
assertBasicPropertiesEqual(settlement, updatedSettlement);
assertThat(updatedSettlement.getConversionIds(), equalTo(Collections.singletonList("24d2ee7f-c7a3-4181-979e-9c58dbace992")));
Map<String, Settlement.Entry> entries = updatedSettlement.getEntries();
assertThat(entries, not(anEmptyMap()));
assertThat(entries, hasEntry("GBP", new Settlement.Entry(new BigDecimal("1000.00"), new BigDecimal("0.00"))));
assertThat(entries, hasEntry("USD", new Settlement.Entry(new BigDecimal("0.00"), new BigDecimal("1511.70"))));
assertThat(updatedSettlement.getUpdatedAt(), equalTo(parseDateTime("2015-05-04T20:40:56+00:00")));
System.out.println("Settlement toString: " + updatedSettlement.toString());
}
@Test
@Betamax(tape = "can_remove_conversion", match = {MatchRule.method, MatchRule.uri, MatchRule.body})
public void testCanRemoveConversionFromSettlement() throws Exception {
Settlement settlement = client.retrieveSettlement("63eeef54-3531-4e65-827a-7d0f37503fcc");
Settlement deletedSettlement = client.removeConversion(settlement.getId(), "24d2ee7f-c7a3-4181-979e-9c58dbace992");
assertThat(deletedSettlement, not(nullValue()));
assertThat(deletedSettlement.getType(), equalTo("bulk"));
assertThat(deletedSettlement.getCreatedAt(), equalTo(parseDateTime("2015-05-04T20:29:16+00:00")));
assertThat(deletedSettlement.getStatus(), equalTo("open"));
}
@Test
@Betamax(tape = "can_release", match = {MatchRule.method, MatchRule.uri, MatchRule.body})
public void testCanReleaseSettlement() throws Exception {
Settlement settlement = client.retrieveSettlement("51c619e0-0256-40ad-afba-ca4114b936f9");
Settlement releasedSettlement = client.releaseSettlement(settlement.getId());
assertBasicPropertiesEqual(settlement, releasedSettlement);
assertThat(releasedSettlement.getReleasedAt(), equalTo(parseDateTime("2015-05-04T21:44:23+00:00")));
assertThat(releasedSettlement.getStatus(), equalTo("released"));
}
@Test
@Betamax(tape = "can_unrelease", match = {MatchRule.method, MatchRule.uri, MatchRule.body})
public void testCanUnreleaseSettlement() throws Exception {
Settlement settlement = client.retrieveSettlement("51c619e0-0256-40ad-afba-ca4114b936f9");
Settlement unreleaseSettlement = client.unreleaseSettlement(settlement.getId());
assertBasicPropertiesEqual(settlement, unreleaseSettlement);
assertThat(unreleaseSettlement.getReleasedAt(), nullValue());
assertThat(unreleaseSettlement.getStatus(), equalTo("open"));
}
private static void assertBasicPropertiesEqual(Settlement settlement, Settlement updatedSettlement) {
assertThat(settlement, not(equalTo(updatedSettlement)));
assertThat(settlement.getId(), equalTo(updatedSettlement.getId()));
assertThat(settlement.getCreatedAt(), equalTo(updatedSettlement.getCreatedAt()));
assertThat(settlement.getShortReference(), equalTo(updatedSettlement.getShortReference()));
}
}
|
package fape.core.planning.planner;
import fape.core.planning.preprocessing.ActionSupporterFinder;
import fape.core.planning.preprocessing.LiftedDTG;
import fape.core.planning.preprocessing.Preprocessor;
import fape.core.planning.search.Handler;
import fape.core.planning.search.flaws.flaws.Flaw;
import fape.core.planning.search.flaws.flaws.Flaws;
import fape.core.planning.search.flaws.resolvers.Resolver;
import fape.core.planning.search.strategies.flaws.FlawCompFactory;
import fape.core.planning.search.strategies.plans.PlanCompFactory;
import fape.core.planning.search.strategies.plans.SeqPlanComparator;
import fape.core.planning.states.Printer;
import fape.core.planning.states.State;
import fape.core.planning.states.SearchNode;
import fape.drawing.gui.ChartWindow;
import fape.exceptions.FlawOrderingAnomaly;
import fape.exceptions.FlawWithNoResolver;
import fape.exceptions.ResolverResultedInInconsistency;
import fape.gui.SearchView;
import fape.util.TinyLogger;
import fape.util.Utils;
import fr.laas.fape.exceptions.InconsistencyException;
import planstack.anml.model.AnmlProblem;
import planstack.constraints.stnu.Controllability;
import java.util.*;
/**
* Base for any planner in FAPE.
* It is responsible for detecting and solving flaws and search procedures.
*
* Classes that inherit from it only have to implement the abstract methods to
* provide a search policy. Overriding methods can also be done to change the
* default behaviour.
*/
public class Planner {
public Planner(State initialState, PlanningOptions options) {
this.options = options;
this.pb = initialState.pb;
this.controllability = initialState.controllability;
this.dtg = new LiftedDTG(this.pb);
queue = new PriorityQueue<>(100, this.heuristicComputer().comparator(options));
SearchNode root = new SearchNode(initialState);
root.addOperation(s -> {
s.setPlanner(this);
s.notify(Handler.StateLifeTime.PRE_QUEUE_INSERTION);
});
queue.add(root);
if(options.displaySearch) {
searchView = new SearchView(this);
searchView.addNode(root);
}
}
public final PlanningOptions options;
public Preprocessor preprocessor;
public static boolean debugging = false;
public int numGeneratedStates = 1; //count the initial state
public int numExpandedStates = 0;
public int numFastForwardedStates = 0;
public final Controllability controllability;
public final AnmlProblem pb;
LiftedDTG dtg = null;
SearchView searchView = null;
public ActionSupporterFinder getActionSupporterFinder() {
return dtg;
}
private final PriorityQueue<SearchNode> queue;
/**
* All possible states of the planner.
*/
public enum EPlanState {
TIMEOUT, CONSISTENT, INCONSISTENT, INFEASIBLE
}
/**
* what is the current state of the plan
*/
public EPlanState planState = EPlanState.INCONSISTENT;
public List<Handler> getHandlers() { return options.handlers; }
public List<Flaw> getFlaws(SearchNode st) {
return st.getState().getFlaws(options.flawFinders, flawComparator(st.getState()));
}
/**
* Implementation of search. Simply forwards the call to the depth-bounded search with no
* depth limit and no incremental deepening.
*
* @param deadline Absolute time (in ms) at which the planner must stop.
* @return A solution state if the planner found one. null otherwise.
*/
public State search(final long deadline) {
return search(deadline, Integer.MAX_VALUE, false);
}
/**
* @param deadline Absolute time (in ms) at which the planner must stop.
* @param maxDepth Any partial plan whose depth is greater than this is discarded.
* Note that the depth of a partial plan is computed with respect to the
* initial state (i.e. repairing a state starts with a depth > 0).
* @param incrementalDeepening If set to true, the planner increases the maximum allowed
* depth from 1 up to maxDepth until a plan is found or the planner times out.
* @return A solution plan if the planner finds one, null otherwise.
* Also check the "planState" field for more detailed information.
*/
public State search(final long deadline, final int maxDepth, final boolean incrementalDeepening) {
if (options.useAEpsilon) {
return aEpsilonSearch(deadline, maxDepth, incrementalDeepening);
} else {
return bestFirstSearch(deadline, maxDepth, incrementalDeepening);
}
}
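// Example invocation (sketch): give the planner thirty seconds from now, no depth bound and
// no iterative deepening, then inspect planState when no plan comes back:
//
//   State plan = planner.search(System.currentTimeMillis() + 30000);
//   if (plan == null && planner.planState == Planner.EPlanState.TIMEOUT) { /* ran out of time */ }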
public State bestFirstSearch(final long deadline, final int maxDepth, final boolean incrementalDeepening){
List<SearchNode> toRestore = new LinkedList<>(queue);
int currentMaxDepth;
if(incrementalDeepening)
currentMaxDepth = 1;
else
currentMaxDepth = maxDepth;
State solution = null;
while(currentMaxDepth <= maxDepth && solution == null && planState != EPlanState.TIMEOUT)
{
queue.clear();
queue.addAll(toRestore);
solution = depthBoundedAStar(deadline, currentMaxDepth);
if (currentMaxDepth == Integer.MAX_VALUE) // make sure we don't overflow
break;
currentMaxDepth += 1;
if (debugging && incrementalDeepening)
System.out.println("Current max depth: "+currentMaxDepth+". Expanded nodes: "+ numExpandedStates);
}
return solution;
}
/**
* Provides a comparator that is used to sort flaws. The first flaw will be
* selected to be resolved.
*
* @param st State in which the flaws appear.
* @return The comparator to use for ordering.
*/
private Comparator<Flaw> flawComparator(State st) {
return FlawCompFactory.get(st, this, options.flawSelStrategies);
}
private SeqPlanComparator heuristic = null;
/**
* The comparator used to order the queue. The first state in the queue
* (according to this comparator) will be selected for expansion.
*
* @return The comparator to use for ordering the queue.
*/
public final SeqPlanComparator heuristicComputer() {
if(heuristic == null) {
heuristic = PlanCompFactory.get(this, options.planSelStrategies);
}
return heuristic;
}
/**
* Depth-bounded best-first search over the current queue of partial plans.
*
* @param deadLine Absolute time (in ms) at which the search must stop.
* @param maxDepth Partial plans deeper than this are not expanded.
* @return A solution state if one is found before the deadline, null otherwise
*         (planState is set to TIMEOUT or INFEASIBLE accordingly).
*/
private State depthBoundedAStar(final long deadLine, final int maxDepth) {
while (true) {
if (System.currentTimeMillis() > deadLine) {
TinyLogger.LogInfo("Timeout.");
this.planState = EPlanState.TIMEOUT;
return null;
}
if (queue.isEmpty()) {
this.planState = EPlanState.INFEASIBLE;
return null;
}
//get the best state and continue the search
SearchNode st = queue.remove();
try {
// let all handlers know that this state was selected for expansion
for (Handler h : options.handlers)
h.addOperation(st, Handler.StateLifeTime.SELECTION, this);
if (!st.getState().isConsistent()) {
if (options.displaySearch)
searchView.setDeadEnd(st);
continue;
}
List<Flaw> flaws = getFlaws(st);
if (flaws.isEmpty()) {
// this is a solution state
if (options.displaySearch)
searchView.setSolution(st);
this.planState = EPlanState.CONSISTENT;
TinyLogger.LogInfo("Plan found:");
TinyLogger.LogInfo(st.getState());
return st.getState();
} else if (st.getDepth() < maxDepth) {
List<SearchNode> children = expand(st);
for (SearchNode child : children) {
queue.add(child);
}
}
} catch (InconsistencyException e) {
if(options.displaySearch) {
searchView.setDeadEnd(st);
}
}
}
}
/**
* Expand a partial plan by selecting a flaw and generating resolvers for this flaw.
* @param st Partial plan to expand
* @return All consistent children as a result of the expansion.
*/
private List<SearchNode> expand(SearchNode st) {
try {
if (options.displaySearch)
searchView.setCurrentFocus(st);
if(TinyLogger.logging) {
TinyLogger.LogInfo(Printer.temporalDatabaseManager(st.getState()));
}
assert st.getState().isConsistent() : "Expand was given an inconsistent state.";
if (st.getDepth() == 0 && st.getState().addableActions != null) {
assert st.getState().getAllActions().isEmpty();
preprocessor.restrictPossibleActions(st.getState().addableActions);
}
numExpandedStates++;
TinyLogger.LogInfo(st.getState(), "\nCurrent state: [%s]", st.getID());
List<Flaw> flaws = getFlaws(st);
assert !flaws.isEmpty() : "Cannot expand a flaw free state. It is already a solution.";
// just take the first flaw and its resolvers (unless the flaw is chosen on command line)
Flaw f;
if (options.chooseFlawManually) {
System.out.print("STATE :" + st.getID() + "\n");
System.out.println(Printer.temporalDatabaseManager(st.getState()));
for (int i = 0; i < flaws.size(); i++)
System.out.println("[" + i + "] " + Printer.p(st.getState(), flaws.get(i)));
int choosen = Utils.readInt();
f = flaws.get(choosen);
} else {
f = flaws.get(0);
}
List<Resolver> resolvers = f.getResolvers(st.getState(), this);
// make sure resolvers are always in the same order (for reproducibility)
Collections.sort(resolvers);
if (options.displaySearch)
searchView.setProperty(st, SearchView.SELECTED_FLAW, Printer.p(st.getState(), f));
if (resolvers.isEmpty()) {
// dead end, keep going
TinyLogger.LogInfo(st.getState(), " Dead-end, flaw without resolvers: %s", flaws.get(0));
if (options.displaySearch) {
searchView.setProperty(st, SearchView.COMMENT, " Dead-end, flaw without resolvers: " + flaws.get(0));
searchView.setDeadEnd(st);
}
st.setExpanded();
return Collections.emptyList();
}
TinyLogger.LogInfo(st.getState(), " Flaw: %s", f);
List<SearchNode> children = new LinkedList<>();
final Object flawsHash = Flaws.hash(flaws);
// compute all valid children
for (int resolverID = 0; resolverID < resolvers.size(); resolverID++) {
SearchNode next = new SearchNode(st);
final int currentResolver = resolverID;
try {
next.addOperation(s -> {
List<Flaw> fs = s.getFlaws(options.flawFinders, flawComparator(s));
// assert Flaws.hash(fs).equals(flawsHash) : "There is a problem with the generated flaws.";
Flaw selectedFlaw = fs.get(0);
List<Resolver> possibleResolvers = selectedFlaw.getResolvers(s, this);
Collections.sort(possibleResolvers);
Resolver res;
try {
res = possibleResolvers.get(currentResolver);
} catch (IndexOutOfBoundsException e) {
// apparently we tried to access a resolver that does not exist: either a
// resolver disappeared or the flaw is not the one we expected
throw new FlawOrderingAnomaly(flaws, 0, currentResolver);
}
if (!applyResolver(s, res, false))
s.setDeadEnd();
else {
s.checkConsistency();
if (s.isConsistent() && options.useFastForward)
fastForward(s, 10);
}
s.checkConsistency();
s.notify(Handler.StateLifeTime.PRE_QUEUE_INSERTION);
});
boolean success = next.getState().isConsistent();
String hrComment = "";
if (!success)
hrComment = "Non consistent resolver application or error while fast-forwarding.";
if (success) {
children.add(next);
numGeneratedStates++;
} else {
TinyLogger.LogInfo(st.getState(), " Dead-end reached for state: %s", next.getID());
//inconsistent state, doing nothing
}
if (options.displaySearch) {
searchView.addNode(next);
if (!success)
searchView.setDeadEnd(next);
searchView.setProperty(next, SearchView.LAST_APPLIED_RESOLVER, Printer.p(st.getState(), resolvers.get(currentResolver)));
searchView.setProperty(next, SearchView.COMMENT, hrComment);
}
} catch (InconsistencyException e) {
if(options.displaySearch) {
searchView.addNode(next);
searchView.setDeadEnd(next);
searchView.setProperty(next, SearchView.LAST_APPLIED_RESOLVER, Printer.p(st.getState(), resolvers.get(currentResolver)));
searchView.setProperty(next, SearchView.COMMENT, e.toString());
}
}
}
st.setExpanded();
return children;
} catch (InconsistencyException e) {
if(options.displaySearch) {
searchView.setDeadEnd(st);
searchView.setProperty(st, SearchView.COMMENT, e.toString());
}
return Collections.emptyList();
}
}
/**
* This function looks at flaws and resolvers in the state and fixes flaws with a single resolver.
* It does that at most maxForwardStates times.
*/
private boolean fastForward(State st, int maxForwardStates) {
if(maxForwardStates == 0)
return true;
List<Flaw> flaws = st.getFlaws(options.flawFinders, flawComparator(st));
if (flaws.isEmpty()) {
return true;
}
//we just take the first flaw and its resolvers
Flaw flaw = flaws.get(0);
List<Resolver> resolvers = flaw.getResolvers(st, this);
if (resolvers.isEmpty()) {
throw new FlawWithNoResolver(flaw);
}
if(resolvers.size() == 1) {
Resolver res = resolvers.get(0);
if(!applyResolver(st, res, true))
throw new ResolverResultedInInconsistency(flaw, res);
else
st.checkConsistency();
TinyLogger.LogInfo(st, " [%s] ff: Adding %s", st.mID, res);
if(st.isConsistent()) {
numFastForwardedStates++;
return fastForward(st, maxForwardStates-1);
} else {
throw new ResolverResultedInInconsistency(flaw, res);
}
} else {
// nothing was done
return true;
}
}
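// Rationale: a flaw with exactly one resolver can be committed to eagerly since no branching
// is lost; bounding the recursion to maxForwardStates commitments per call keeps a long chain
// of forced choices from monopolising a single expansion.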
/**
* Applies a resolver to the given state. This state will be modified to integrate the resolver.
*
* @param st State to modify
* @param resolver Resolver to apply
* @return True if the resolver was successfully applied and the resulting state is consistent.
* False otherwise.
*/
private boolean applyResolver(State st, Resolver resolver, boolean isFastForwarding) {
boolean result = resolver.apply(st, this, isFastForwarding) && st.csp.propagateMixedConstraints() && st.checkConsistency();
return result;
}
private State aEpsilonSearch(final long deadLine, final int maxDepth, final boolean incrementalDeepening) {
assert !incrementalDeepening : "Incremental Deepening is not supported in A-Epsilon search.";
// stores the admissible children of the last expanded node.
PriorityQueue<SearchNode> AX = new PriorityQueue<SearchNode>(10, heuristicComputer().comparator(options));
if (queue.isEmpty()) {
this.planState = EPlanState.INFEASIBLE;
TinyLogger.LogInfo("Initially empty queue.");
return null;
}
double fThreshold = (1f + options.epsilon) * f(queue.peek());
int numStatesExploredInDepth = 0;
int numStatesToExploreInBest = 0;
while (true) {
if (System.currentTimeMillis() > deadLine) {
TinyLogger.LogInfo("Timeout.");
this.planState = EPlanState.TIMEOUT;
return null;
}
if (queue.isEmpty()) {
this.planState = EPlanState.INFEASIBLE;
TinyLogger.LogInfo("Empty queue.");
return null;
}
SearchNode current;
if(AX.isEmpty() || numStatesToExploreInBest > 0) {
if(numStatesExploredInDepth > 0) {
numStatesToExploreInBest = Math.round(numStatesExploredInDepth / options.depthShallowRatio);
// we should keep expanding least-cost nodes for a while, in proportion to how many nodes were just explored depth-first
numStatesExploredInDepth = 0;
}
// get best in open
current = queue.poll();
                numStatesToExploreInBest--;
} else {
numStatesExploredInDepth++;
// still interesting states (below threshold) in the successors of the previously expanded state
current = AX.poll();
queue.remove(current);
}
try {
// let all handlers know that this state was selected for expansion
for(Handler h : options.handlers)
h.addOperation(current, Handler.StateLifeTime.SELECTION, this);
if(!current.getState().isConsistent())
throw new InconsistencyException();
if (current.getState().isSolution(options.flawFinders)) {
// this is a solution state
if(options.displaySearch)
searchView.setSolution(current);
this.planState = EPlanState.CONSISTENT;
TinyLogger.LogInfo("Plan found:");
TinyLogger.LogInfo(current.getState());
return current.getState();
} else if(current.getDepth() < maxDepth) {
// expand the state
List<SearchNode> children = expand(current);
AX.clear();
for(SearchNode child : children) {
queue.add(child);
// add admissible children to AX for next iteration
if(f(child) < fThreshold) {
AX.add(child);
}
}
}
// update the threshold, since our heuristic is not admissible, we do not take
// the max of fthreshold and (1+e)*f(best in open)
if(!queue.isEmpty())
fThreshold = (1f + options.epsilon) * f(queue.peek());
} catch (InconsistencyException e) {
if(options.displaySearch)
searchView.setDeadEnd(current);
TinyLogger.LogInfo("\nCurrent state: ["+current.getID()+"]");
TinyLogger.LogInfo(" Non consistent");
}
}
}
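    // evaluation functions of the weighted search: f(n) = g(n) + heuristicWeight * h(n)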
public double h(SearchNode st){ return options.heuristicWeight * heuristic.h(st); }
public double g(SearchNode st){ return heuristic.g(st); }
public double f(SearchNode st) { return g(st) + h(st); }
private ChartWindow chartWindow = null;
public void drawState(State st) {
if(chartWindow == null)
chartWindow = new ChartWindow("Actions");
chartWindow.draw(st.getCanvasOfActions());
}
}
|
package io.advantageous.qbit.server;
import io.advantageous.qbit.QBit;
import io.advantageous.qbit.annotation.RequestMethod;
import io.advantageous.qbit.http.*;
import io.advantageous.qbit.message.MethodCall;
import io.advantageous.qbit.message.Response;
import io.advantageous.qbit.queue.ReceiveQueue;
import io.advantageous.qbit.queue.ReceiveQueueListener;
import io.advantageous.qbit.queue.ReceiveQueueManager;
import io.advantageous.qbit.queue.impl.BasicReceiveQueueManager;
import io.advantageous.qbit.service.ServiceBundle;
import io.advantageous.qbit.spi.ProtocolEncoder;
import io.advantageous.qbit.util.Timer;
import org.boon.Str;
import org.boon.StringScanner;
import org.boon.core.reflection.AnnotationData;
import org.boon.core.reflection.ClassMeta;
import org.boon.core.reflection.MethodAccess;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import static org.boon.Boon.puts;
import static org.boon.Exceptions.die;
public class Server {
protected ProtocolEncoder encoder;
protected HttpServer httpServer;
protected ServiceBundle serviceBundle;
private Set<String> getMethodURIs = new LinkedHashSet<>();
private Set<String> postMethodURIs = new LinkedHashSet<>();
private final Logger logger = LoggerFactory.getLogger(Server.class);
private final Timer timer = Timer.timer();
private Map<String, WebsSocketSender> webSocketMap = new ConcurrentHashMap<>();
private Map<String, HttpResponse> responseMap = new ConcurrentHashMap<>();
private ReceiveQueue<Response<Object>> responses;
private AtomicBoolean stop = new AtomicBoolean();
private ScheduledExecutorService monitor;
private ScheduledFuture<?> future;
private ReceiveQueueManager<Response<Object>> receiveQueueManager;
private AtomicLong lastFlushTime = new AtomicLong();
protected void initServices(Set<Object> services) {
for (Object service : services) {
puts(service.getClass().getName());
serviceBundle.addService(service);
this.addRestSupportFor(service.getClass(), serviceBundle.address());
}
}
public void stop() {
stop.set(true);
try {
if (future != null) {
future.cancel(true);
}
}catch (Exception ex) {
logger.warn("shutting down", ex);
}
try {
if (monitor!=null) {
monitor.shutdown();
}
}catch (Exception ex) {
logger.warn("shutting down", ex);
}
}
public void run() {
serviceBundle.startReturnHandlerProcessor();
httpServer.setHttpRequestConsumer((final HttpRequest request) -> {
handleRestCall(request);
});
httpServer.setWebSocketMessageConsumer((final WebSocketMessage webSocketMessage) -> {
handleWebSocketCall(webSocketMessage);
});
startResponseQueueListener();
httpServer.run();
}
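    /**
     * Starts a single scheduled thread that drains the service bundle's response queue
     * and dispatches each response back to the HTTP or WebSocket client that issued the call.
     */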
private void startResponseQueueListener() {
receiveQueueManager = new BasicReceiveQueueManager<>();
monitor = Executors.newScheduledThreadPool(1,
runnable -> {
Thread thread = new Thread(runnable);
thread.setName("QBit Server");
return thread;
}
);
responses = serviceBundle.responses();
stop.set(false);
future = monitor.scheduleAtFixedRate(() -> {
try {
receiveQueueManager.manageQueue(responses, queueListener(), 50, stop);
} catch (Exception ex) {
logger.error(this.getClass().getName() + " Problem running queue manager", ex);
}
}, 50, 50, TimeUnit.MILLISECONDS);
}
private ReceiveQueueListener<Response<Object>> queueListener() {
return new ReceiveQueueListener<Response<Object>>() {
@Override
public void receive(final Response<Object> response) {
handleResponseFromServiceBundle(response);
}
@Override
public void empty() {
handleServiceBundleFlush();
}
@Override
public void limit() {
}
@Override
public void shutdown() {
}
@Override
public void idle() {
handleServiceBundleFlush();
}
};
}
private void handleServiceBundleFlush() {
long now = timer.now();
/* Force a flush every 10 milliseconds. */
if (now > lastFlushTime.get() + 10L) {
serviceBundle.flushSends();
}
}
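    /**
     * Routes a response from the service bundle back to its origin: the HTTP response registered
     * for the return address if there is one, otherwise the WebSocket sender for that address.
     */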
private void handleResponseFromServiceBundle(Response<Object> response) {
String address = response.returnAddress();
HttpResponse httpResponse = responseMap.get(address);
String responseAsText = encoder.encodeAsString(response);
if (httpResponse != null) {
httpResponse.response(200, "application/json", responseAsText);
} else {
WebsSocketSender webSocket = webSocketMap.get(address);
if (webSocket != null) {
webSocket.send(responseAsText);
}
}
}
private void addRestSupportFor(Class cls, String baseURI) {
System.out.println("addRestSupportFor " + cls.getName());
ClassMeta classMeta = ClassMeta.classMeta(cls);
        AnnotationData requestMappingAnnotation = classMeta.annotation("RequestMapping");
        if (requestMappingAnnotation == null) {
            return;
        }
        Map<String, Object> requestMapping = requestMappingAnnotation.getValues();
String serviceURI = ((String[])requestMapping.get("value"))[0];
Iterable<MethodAccess> methods = classMeta.methods();
registerMethodsToEndPoints(baseURI, serviceURI, methods);
}
private void registerMethodsToEndPoints(String baseURI, String serviceURI, Iterable<MethodAccess> methods) {
for (MethodAccess method : methods) {
if (!method.isPublic() || method.method().getName().contains("$")) continue;
if (!method.hasAnnotation("RequestMapping")) {
continue;
}
registerMethodToEndPoint(baseURI, serviceURI, method);
}
}
private void registerMethodToEndPoint(String baseURI, String serviceURI, MethodAccess method) {
        AnnotationData data = method.annotation("RequestMapping");
        if (data == null) return;
        Map<String, Object> methodValuesForAnnotation = data.getValues();
String methodURI = extractMethodURI(methodValuesForAnnotation);
RequestMethod httpMethod = extractHttpMethod(methodValuesForAnnotation);
switch (httpMethod) {
case GET:
getMethodURIs.add(Str.add(baseURI, serviceURI, methodURI));
break;
case POST:
postMethodURIs.add(Str.add(baseURI, serviceURI, methodURI));
break;
default:
die("Not supported yet HTTP METHOD", httpMethod, methodURI);
}
}
private RequestMethod extractHttpMethod(Map<String, Object> methodValuesForAnnotation) {
RequestMethod httpMethod = null;
RequestMethod[] httpMethods = (RequestMethod[]) methodValuesForAnnotation.get("method");
if (httpMethods != null && httpMethods.length > 0 ) {
httpMethod = httpMethods[0];
}
httpMethod = httpMethod==null ? RequestMethod.GET : httpMethod;
return httpMethod;
}
private String extractMethodURI(Map<String, Object> methodValuesForAnnotation) {
String [] values = (String[]) methodValuesForAnnotation.get("value");
String methodURI = values[0];
if (methodURI.contains("{")) {
methodURI = StringScanner.split(methodURI, '{', 1)[0];
}
return methodURI;
}
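    /**
     * Handles an incoming REST call: unknown URIs are answered with a 404; otherwise the request is
     * converted into a MethodCall, handed to the service bundle, and the HTTP response object is
     * stored under the caller's remote address so that the service's response can be written back.
     */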
private void handleRestCall(HttpRequest request) {
puts(request);
boolean knownURI = false;
String uri = request.getUri();
switch (request.getMethod()) {
case "GET":
knownURI = getMethodURIs.contains(uri);
break;
case "POST":
knownURI = postMethodURIs.contains(uri);
break;
}
        if (!knownURI) {
            request.getResponse().response(404, "application/json", Str.add("No service method for URI ", request.getUri()) );
            // unknown URI: nothing to dispatch to the service bundle
            return;
        }
MethodCall<Object> methodCall =
QBit.factory().createMethodCallToBeParsedFromBody(request.getUri(),
request.getRemoteAddress(),
null,
null,request.getBody(), request.getParams()
);
puts("RETURN ADDRESS" , methodCall.returnAddress());
serviceBundle.call(methodCall);
responseMap.put(request.getRemoteAddress(), request.getResponse());
}
private void handleWebSocketCall(final WebSocketMessage webSocketMessage) {
puts(webSocketMessage);
final MethodCall<Object> methodCall = QBit.factory().createMethodCallToBeParsedFromBody(webSocketMessage.getRemoteAddress(),
webSocketMessage.getMessage());
serviceBundle.call(methodCall);
webSocketMap.put(methodCall.returnAddress(), webSocketMessage.getSender());
}
}
|
package de.undercouch.citeproc;
import de.undercouch.citeproc.csl.CSLCitation;
import de.undercouch.citeproc.csl.CSLCitationItem;
import de.undercouch.citeproc.csl.CSLCitationItemBuilder;
import de.undercouch.citeproc.csl.CSLItemData;
import de.undercouch.citeproc.csl.CSLItemDataBuilder;
import de.undercouch.citeproc.csl.internal.GeneratedCitation;
import de.undercouch.citeproc.csl.internal.RenderContext;
import de.undercouch.citeproc.csl.internal.SSort;
import de.undercouch.citeproc.csl.internal.SStyle;
import de.undercouch.citeproc.csl.internal.format.Format;
import de.undercouch.citeproc.csl.internal.format.HtmlFormat;
import de.undercouch.citeproc.csl.internal.format.TextFormat;
import de.undercouch.citeproc.csl.internal.locale.LLocale;
import de.undercouch.citeproc.helper.CSLUtils;
import de.undercouch.citeproc.output.Bibliography;
import de.undercouch.citeproc.output.Citation;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.StringReader;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.Enumeration;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.function.Predicate;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.IntStream;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
public class CSL {
/**
* The output format
*/
private Format outputFormat = new HtmlFormat();
/**
* {@code true} if the processor should convert URLs and DOIs in the output
* to links.
* @see #setConvertLinks(boolean)
*/
private boolean convertLinks = false;
/**
* The CSL style used to render citations and bibliographies
*/
private final SStyle style;
/**
* The localization data used to render citations and bibliographies
*/
private final LLocale locale;
/**
* An object that provides citation item data
*/
private final ItemDataProvider itemDataProvider;
/**
* Citation items registered through {@link #registerCitationItems(String...)}
*/
private final Map<String, CSLItemData> registeredItems = new LinkedHashMap<>();
/**
* Contains the same items as {@link #registeredItems} but sorted
*/
private final List<CSLItemData> sortedItems = new ArrayList<>();
/**
* A list of generated citations sorted by their index
*/
private List<GeneratedCitation> generatedCitations = new ArrayList<>();
/**
* Constructs a new citation processor
* @param itemDataProvider an object that provides citation item data
* @param style the citation style to use. May either be a serialized
* XML representation of the style or a style's name such as <code>ieee</code>.
* In the latter case, the processor loads the style from the classpath (e.g.
* <code>/ieee.csl</code>)
* @throws IOException if the CSL style could not be loaded
*/
public CSL(ItemDataProvider itemDataProvider, String style) throws IOException {
this(itemDataProvider, new DefaultLocaleProvider(),
new DefaultAbbreviationProvider(), style, "en-US");
}
/**
* Constructs a new citation processor
* @param itemDataProvider an object that provides citation item data
* @param style the citation style to use. May either be a serialized
* XML representation of the style or a style's name such as <code>ieee</code>.
* In the latter case, the processor loads the style from the classpath (e.g.
* <code>/ieee.csl</code>)
* @param lang an RFC 4646 identifier for the citation locale (e.g. <code>en-US</code>)
* @throws IOException if the CSL style could not be loaded
*/
public CSL(ItemDataProvider itemDataProvider, String style, String lang) throws IOException {
this(itemDataProvider, new DefaultLocaleProvider(),
new DefaultAbbreviationProvider(), style, lang);
}
/**
* Constructs a new citation processor
* @param itemDataProvider an object that provides citation item data
* @param localeProvider an object that provides CSL locales
* @param abbreviationProvider an object that provides abbreviations
* @param style the citation style to use. May either be a serialized
* XML representation of the style or a style's name such as <code>ieee</code>.
* In the latter case, the processor loads the style from the classpath (e.g.
* <code>/ieee.csl</code>)
* @param lang an RFC 4646 identifier for the citation locale (e.g. <code>en-US</code>)
* @throws IOException if the CSL style could not be loaded
*/
public CSL(ItemDataProvider itemDataProvider, LocaleProvider localeProvider,
AbbreviationProvider abbreviationProvider, String style,
String lang) throws IOException {
// load style if needed
if (!isStyle(style)) {
style = loadStyle(style);
}
this.itemDataProvider = itemDataProvider;
// TODO parse style and locale directly from URL if possible
// TODO instead of loading them into strings first
DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
DocumentBuilder builder;
try {
builder = factory.newDocumentBuilder();
} catch (ParserConfigurationException e) {
throw new IOException("Could not create document builder", e);
}
// load style
Document styleDocument;
try {
styleDocument = builder.parse(new InputSource(
new StringReader(style)));
} catch (SAXException e) {
throw new IOException("Could not parse style", e);
}
this.style = new SStyle(styleDocument);
// load locale
String strLocale = localeProvider.retrieveLocale(lang);
Document localeDocument;
try {
localeDocument = builder.parse(new InputSource(
new StringReader(strLocale)));
} catch (SAXException e) {
throw new IOException("Could not parse locale", e);
}
LLocale locale = new LLocale(localeDocument);
if (this.style.getLocale() != null &&
(this.style.getLocale().getLang() == null ||
(this.style.getLocale().getLang().getLanguage().equals(locale.getLang().getLanguage()) &&
(this.style.getLocale().getLang().getCountry().isEmpty() ||
this.style.getLocale().getLang().getCountry().equals(locale.getLang().getCountry()))))) {
// additional localization data in the style file overrides or
// augments the data from the locale file
this.locale = locale.merge(this.style.getLocale());
} else {
this.locale = locale;
}
}
/**
* Get a list of supported output formats
* @return the formats
*/
public static List<String> getSupportedOutputFormats() {
return Arrays.asList("text", "html");
}
private static Set<String> getAvailableFiles(String prefix,
String knownName, String extension) throws IOException {
Set<String> result = new LinkedHashSet<>();
// first load a file that is known to exist
String name = prefix + knownName + "." + extension;
URL knownUrl = CSL.class.getResource("/" + name);
if (knownUrl != null) {
String path = knownUrl.getPath();
// get the jar file containing the file
if (path.endsWith(".jar!/" + name)) {
String jarPath = path.substring(0, path.length() - name.length() - 2);
URI jarUri;
try {
jarUri = new URI(jarPath);
} catch (URISyntaxException e) {
// ignore
return result;
}
try (ZipFile zip = new ZipFile(new File(jarUri))) {
Enumeration<? extends ZipEntry> entries = zip.entries();
while (entries.hasMoreElements()) {
ZipEntry e = entries.nextElement();
if (e.getName().endsWith("." + extension) &&
(prefix.isEmpty() || e.getName().startsWith(prefix))) {
result.add(e.getName().substring(
prefix.length(), e.getName().length() - 4));
}
}
}
}
}
return result;
}
/**
* Calculates a list of available citation styles
* @return the list
* @throws IOException if the citation styles could not be loaded
*/
public static Set<String> getSupportedStyles() throws IOException {
return getAvailableFiles("", "ieee", "csl");
}
/**
* Checks if a given citation style is supported
* @param style the citation style's name
* @return true if the style is supported, false otherwise
*/
public static boolean supportsStyle(String style) {
String styleFileName = style;
if (!styleFileName.endsWith(".csl")) {
styleFileName = styleFileName + ".csl";
}
if (!styleFileName.startsWith("/")) {
styleFileName = "/" + styleFileName;
}
URL url = CSL.class.getResource(styleFileName);
return (url != null);
}
/**
* Calculates a list of available citation locales
* @return the list
* @throws IOException if the citation locales could not be loaded
*/
public static Set<String> getSupportedLocales() throws IOException {
return getAvailableFiles("locales-", "en-US", "xml");
}
/**
* Checks if the given String contains the serialized XML representation
* of a style
* @param style the string to examine
* @return true if the String is XML, false otherwise
*/
private boolean isStyle(String style) {
for (int i = 0; i < style.length(); ++i) {
char c = style.charAt(i);
if (!Character.isWhitespace(c)) {
return (c == '<');
}
}
return false;
}
/**
* Loads a CSL style from the classpath. For example, if the given name
* is <code>ieee</code> this method will load the file <code>/ieee.csl</code>
* @param styleName the style's name
* @return the serialized XML representation of the style
* @throws IOException if the style could not be loaded
*/
private String loadStyle(String styleName) throws IOException {
URL url;
if (styleName.startsWith("http:
try {
// try to load matching style from classpath
return loadStyle(styleName.substring(styleName.lastIndexOf('/') + 1));
} catch (FileNotFoundException e) {
// there is no matching style in classpath
url = new URL(styleName);
}
} else {
// normalize file name
if (!styleName.endsWith(".csl")) {
styleName = styleName + ".csl";
}
if (!styleName.startsWith("/")) {
styleName = "/" + styleName;
}
// try to find style in classpath
url = getClass().getResource(styleName);
if (url == null) {
throw new FileNotFoundException("Could not find style in "
+ "classpath: " + styleName);
}
}
// load style
String result = CSLUtils.readURLToString(url, "UTF-8");
// handle dependent styles
if (isDependent(result)) {
String independentParentLink;
try {
independentParentLink = getIndependentParentLink(result);
} catch (ParserConfigurationException | IOException | SAXException e) {
throw new IOException("Could not load independent parent style", e);
}
if (independentParentLink == null) {
throw new IOException("Dependent style does not have an "
+ "independent parent");
}
return loadStyle(independentParentLink);
}
return result;
}
/**
* Test if the given string represents a dependent style
* @param style the style
* @return true if the string is a dependent style, false otherwise
*/
private boolean isDependent(String style) {
if (!style.trim().startsWith("<")) {
return false;
}
Pattern p = Pattern.compile("rel\\s*=\\s*\"\\s*independent-parent\\s*\"");
Matcher m = p.matcher(style);
return m.find();
}
/**
     * Parses a string representing a dependent style and
     * gets the link to its independent parent style
* @param style the dependent style
* @return the link to the parent style or <code>null</code> if the link
* could not be found
* @throws ParserConfigurationException if the XML parser could not be created
* @throws IOException if the string could not be read
* @throws SAXException if the string could not be parsed
*/
public String getIndependentParentLink(String style)
throws ParserConfigurationException, IOException, SAXException {
DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
DocumentBuilder builder = factory.newDocumentBuilder();
InputSource src = new InputSource(new StringReader(style));
Document doc = builder.parse(src);
NodeList links = doc.getElementsByTagName("link");
for (int i = 0; i < links.getLength(); ++i) {
Node n = links.item(i);
Node relAttr = n.getAttributes().getNamedItem("rel");
if (relAttr != null) {
if ("independent-parent".equals(relAttr.getTextContent())) {
Node hrefAttr = n.getAttributes().getNamedItem("href");
if (hrefAttr != null) {
return hrefAttr.getTextContent();
}
}
}
}
return null;
}
/**
* Sets the processor's output format
     * @param format the format (one of {@code "html"} or {@code "text"})
*/
public void setOutputFormat(String format) {
if ("text".equals(format)) {
setOutputFormat(new TextFormat());
} else if ("html".equals(format)) {
setOutputFormat(new HtmlFormat());
} else {
throw new IllegalArgumentException("Unknown output format: `" +
format + "'. Supported formats: `text', `html'.");
}
}
/**
* Sets the processor's output format
* @param outputFormat the format
*/
public void setOutputFormat(Format outputFormat) {
this.outputFormat = outputFormat;
outputFormat.setConvertLinks(convertLinks);
}
/**
* Specifies if the processor should convert URLs and DOIs in the output
* to links. How links are created depends on the output format that has
* been set with {@link #setOutputFormat(String)}
* @param convert true if URLs and DOIs should be converted to links
*/
public void setConvertLinks(boolean convert) {
convertLinks = convert;
outputFormat.setConvertLinks(convert);
}
/**
* Enables the abbreviation list with the given name. The processor will
* call {@link AbbreviationProvider#getAbbreviations(String)} with the
* given String to get the abbreviations that should be used from here on.
* @param name the name of the abbreviation list to enable
*/
public void setAbbreviations(String name) {
throw new IllegalArgumentException("Abbreviations are not supported yet.");
}
/**
* Fetches the item data for the given citation items and adds it to
* {@link #registeredItems}. Also, sorts the items according to the sorting
* specified in the style's bibliography element and stores the result in
* {@link #sortedItems}. If the style does not have a bibliography element
* or no sorting is specified, the items will just be appended to
* {@link #sortedItems}. In addition, the method updates any items already
* stored in {@link #sortedItems} and coming after the generated ones.
* Updated items will be returned in the given {@code updatedItems} set
* (unless it is {@code null}).
* @param ids the IDs of the citation items to register
* @param updatedItems an empty set that will be filled with the citation
* items the method had to update (may be {@code null})
* @param unsorted {@code true} if any sorting specified in the style
* should be ignored
* @return a list of registered citation item data
*/
private List<CSLItemData> registerItems(String[] ids,
Set<CSLItemData> updatedItems, boolean unsorted) {
List<CSLItemData> result = new ArrayList<>();
SSort.SortComparator comparator = null;
for (String id : ids) {
// check if item has already been registered
CSLItemData itemData = registeredItems.get(id);
if (itemData != null) {
result.add(itemData);
continue;
}
// fetch item data
itemData = itemDataProvider.retrieveItem(id);
if (itemData == null) {
throw new IllegalArgumentException("Missing citation " +
"item with ID: " + id);
}
// register item
if (unsorted || style.getBibliography() == null ||
style.getBibliography().getSort() == null) {
// We don't have to sort. Add item to the end of the list.
itemData = new CSLItemDataBuilder(itemData)
.citationNumber(String.valueOf(registeredItems.size() + 1))
.build();
sortedItems.add(itemData);
} else {
// We have to sort. Find insert point.
if (comparator == null) {
comparator = style.getBibliography().getSort()
.comparator(style, locale);
}
int i = Collections.binarySearch(sortedItems, itemData, comparator);
if (i < 0) {
i = -(i + 1);
} else {
// binarySearch thinks we found the item in the list but
// this is impossible. It's more likely that the comparator
// returned 0 because no key was given or it did not yield
// sensible results. Just append the item to the list.
i = sortedItems.size();
}
// determine citation number depending on sort direction
int citationNumber;
int citationNumberDirection = comparator.getCitationNumberDirection();
if (citationNumberDirection > 0) {
citationNumber = i + 1;
} else {
citationNumber = sortedItems.size() + 1 - i;
}
// create new item data with citation data and add it to
// the list of sorted items
itemData = new CSLItemDataBuilder(itemData)
.citationNumber(String.valueOf(citationNumber))
.build();
sortedItems.add(i, itemData);
// determine if we need to update the following items or
// the preceding ones (depending on the sort direction)
IntStream idStream;
if (citationNumberDirection > 0) {
idStream = IntStream.range(i + 1, sortedItems.size());
} else {
int e = i;
idStream = IntStream.range(0, e).map(n -> e - 1 - n);
}
// update the other items if necessary
idStream.forEach(j -> {
CSLItemData item2 = sortedItems.get(j);
// determine new citation number
int citationNumber2;
if (citationNumberDirection > 0) {
citationNumber2 = j + 1;
} else {
citationNumber2 = sortedItems.size() - j;
}
// create new item data with new citation number
item2 = new CSLItemDataBuilder(item2)
.citationNumber(String.valueOf(citationNumber2))
.build();
// overwrite existing item data
sortedItems.set(j, item2);
registeredItems.put(item2.getId(), item2);
// store updated item
if (updatedItems != null) {
updatedItems.add(item2);
}
});
}
// save registered item data
registeredItems.put(itemData.getId(), itemData);
result.add(itemData);
}
return result;
}
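    /**
     * Introduces the given citation item IDs to the processor. Any previously registered items are
     * discarded. The item data is fetched from the {@link ItemDataProvider} and sorted according to
     * the sorting specified in the style's bibliography element (if any).
     * @param ids the IDs of the citation items to register
     */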
public void registerCitationItems(String... ids) {
registerCitationItems(ids, false);
}
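    /**
     * Introduces the given citation item IDs to the processor. Any previously registered items are
     * discarded.
     * @param ids the IDs of the citation items to register
     * @param unsorted {@code true} if any sorting specified in the style should be ignored
     */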
public void registerCitationItems(String[] ids, boolean unsorted) {
registeredItems.clear();
sortedItems.clear();
registerItems(ids, null, unsorted);
}
/**
* Get an unmodifiable collection of all citation items that have been
* registered with the processor so far
* @return the registered citation items
*/
public Collection<CSLItemData> getRegisteredItems() {
return Collections.unmodifiableCollection(sortedItems);
}
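    /**
     * Generates citations for the given citation item IDs
     * @param ids the IDs of the citation items
     * @return the newly generated citation plus any previously generated citations that had to be
     * re-rendered because their items were updated
     */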
public List<Citation> makeCitation(String... ids) {
CSLCitationItem[] items = new CSLCitationItem[ids.length];
for (int i = 0; i < ids.length; ++i) {
items[i] = new CSLCitationItem(ids[i]);
}
return makeCitation(new CSLCitation(items));
}
/**
* Perform steps to prepare the given citation for rendering. Register
* citation items and sort them. Return a prepared citation that can be
* passed to {@link #renderCitation(CSLCitation)}
* @param citation the citation to render
* @param updatedItems an empty set that will be filled with citation
* items that had to be updated while rendering the given one (may be
* {@code null})
* @return the prepared citation
*/
private CSLCitation preRenderCitation(CSLCitation citation,
Set<CSLItemData> updatedItems) {
// get item IDs
int len = citation.getCitationItems().length;
String[] itemIds = new String[len];
CSLCitationItem[] items = citation.getCitationItems();
for (int i = 0; i < len; i++) {
CSLCitationItem item = items[i];
itemIds[i] = item.getId();
}
// register items
List<CSLItemData> registeredItems = registerItems(itemIds,
updatedItems, false);
// prepare items
CSLCitationItem[] preparedItems = new CSLCitationItem[len];
for (int i = 0; i < len; i++) {
CSLCitationItem item = items[i];
CSLItemData itemData = registeredItems.get(i);
// overwrite locator
if (item.getLocator() != null) {
itemData = new CSLItemDataBuilder(itemData)
.locator(item.getLocator())
.build();
}
preparedItems[i] = new CSLCitationItemBuilder(item)
.itemData(itemData)
.build();
}
// sort array of items
boolean unsorted = false;
if (citation.getProperties() != null &&
citation.getProperties().getUnsorted() != null) {
unsorted = citation.getProperties().getUnsorted();
}
if (!unsorted && style.getCitation().getSort() != null) {
Comparator<CSLItemData> itemComparator =
style.getCitation().getSort().comparator(style, locale);
Arrays.sort(preparedItems, (a, b) -> itemComparator.compare(
a.getItemData(), b.getItemData()));
}
return new CSLCitation(preparedItems,
citation.getCitationID(), citation.getProperties());
}
/**
* Render the given prepared citation
* @param preparedCitation the citation to render. The citation must have
* been prepared by {@link #preRenderCitation(CSLCitation, Set)}
* @return the rendered string
*/
private String renderCitation(CSLCitation preparedCitation) {
// render items
RenderContext ctx = new RenderContext(style, locale, null,
preparedCitation, Collections.unmodifiableList(generatedCitations));
style.getCitation().render(ctx);
return outputFormat.formatCitation(ctx);
}
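    /**
     * Renders the given citation. Citation items that have not been registered yet are registered
     * on the fly, and previously generated citations whose items were updated in the process are
     * re-rendered.
     * @param citation the citation to render
     * @return the newly generated citation plus any re-rendered ones
     */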
public List<Citation> makeCitation(CSLCitation citation) {
Set<CSLItemData> updatedItems = new LinkedHashSet<>();
CSLCitation preparedCitation = preRenderCitation(citation, updatedItems);
String text = renderCitation(preparedCitation);
// re-render updated citations
List<Citation> result = new ArrayList<>();
if (!updatedItems.isEmpty()) {
List<GeneratedCitation> oldGeneratedCitations = generatedCitations;
generatedCitations = new ArrayList<>(oldGeneratedCitations.size());
for (int i = 0; i < oldGeneratedCitations.size(); i++) {
GeneratedCitation gc = oldGeneratedCitations.get(i);
boolean needsUpdate = false;
for (CSLItemData updatedItemData : updatedItems) {
for (CSLCitationItem item : gc.getOriginal().getCitationItems()) {
if (item.getId().equals(updatedItemData.getId())) {
needsUpdate = true;
break;
}
}
}
if (!needsUpdate) {
generatedCitations.add(gc);
continue;
}
// prepare citation again (!)
CSLCitation upc = preRenderCitation(gc.getOriginal(), null);
// render it again
String ut = renderCitation(upc);
if (!ut.equals(gc.getGenerated().getText())) {
// render result was different
Citation uc = new Citation(i, ut);
generatedCitations.add(new GeneratedCitation(
gc.getOriginal(), upc, uc));
result.add(uc);
}
}
}
// generate citation
Citation generatedCitation = new Citation(generatedCitations.size(), text);
generatedCitations.add(new GeneratedCitation(citation,
preparedCitation, generatedCitation));
result.add(generatedCitation);
return result;
}
/**
* Generates a bibliography for the registered citations
* @return the bibliography
*/
public Bibliography makeBibliography() {
return makeBibliography(null);
}
/**
* Generates a bibliography for the registered citations. Depending
* on the selection mode selects, includes, or excludes bibliography
* items whose fields and field values match the fields and field values
* from the given example item data objects.
* @param mode the selection mode
* @param selection the example item data objects that contain
* the fields and field values to match
* @return the bibliography
*/
public Bibliography makeBibliography(SelectionMode mode, CSLItemData... selection) {
return makeBibliography(mode, selection, null);
}
/**
* <p>Generates a bibliography for the registered citations. Depending
* on the selection mode selects, includes, or excludes bibliography
* items whose fields and field values match the fields and field values
* from the given example item data objects.</p>
* <p>Note: This method will be deprecated in the next release.</p>
* @param mode the selection mode
* @param selection the example item data objects that contain
* the fields and field values to match
* @param quash regardless of the item data in {@code selection}
* skip items if all fields/values from this list match
* @return the bibliography
*/
public Bibliography makeBibliography(SelectionMode mode,
CSLItemData[] selection, CSLItemData[] quash) {
return makeBibliography(item -> {
boolean include = true;
if (selection != null) {
switch (mode) {
case INCLUDE:
include = false;
for (CSLItemData s : selection) {
if (itemDataEqualsAny(item, s)) {
include = true;
break;
}
}
break;
case EXCLUDE:
for (CSLItemData s : selection) {
if (itemDataEqualsAny(item, s)) {
include = false;
break;
}
}
break;
case SELECT:
for (CSLItemData s : selection) {
if (!itemDataEqualsAny(item, s)) {
include = false;
break;
}
}
break;
}
}
if (include && quash != null) {
boolean match = true;
for (CSLItemData s : quash) {
if (!itemDataEqualsAny(item, s)) {
match = false;
break;
}
}
if (match) {
include = false;
}
}
return include;
});
}
/**
* Generates a bibliography for registered citations
* @param filter a function to apply to each registered citation item to
* determine if it should be included in the bibliography or not (may
* be {@code null} if all items should be included)
* @return the bibliography
*/
public Bibliography makeBibliography(Predicate<CSLItemData> filter) {
List<String> entries = new ArrayList<>();
for (CSLItemData item : sortedItems) {
if (filter != null && !filter.test(item)) {
continue;
}
RenderContext ctx = new RenderContext(style, locale, item);
style.getBibliography().render(ctx);
if (!ctx.getResult().isEmpty()) {
entries.add(outputFormat.formatBibliographyEntry(ctx));
}
}
return outputFormat.makeBibliography(entries.toArray(new String[0]),
style.getBibliography());
}
/**
* Resets the processor's state
*/
public void reset() {
outputFormat = new HtmlFormat();
convertLinks = false;
registeredItems.clear();
sortedItems.clear();
generatedCitations.clear();
}
/**
* Creates an ad hoc bibliography from the given citation items using the
* <code>"html"</code> output format. Calling this method is rather
* expensive as it initializes the CSL processor. If you need to create
* bibliographies multiple times in your application you should create
* the processor yourself and cache it if necessary.
* @param style the citation style to use. May either be a serialized
* XML representation of the style or a style's name such as <code>ieee</code>.
* In the latter case, the processor loads the style from the classpath (e.g.
* <code>/ieee.csl</code>)
* @param items the citation items to add to the bibliography
* @return the bibliography
     * @throws IOException if the CSL style could not be loaded
* @see #makeAdhocBibliography(String, String, CSLItemData...)
*/
public static Bibliography makeAdhocBibliography(String style, CSLItemData... items)
throws IOException {
return makeAdhocBibliography(style, "html", items);
}
/**
* Creates an ad hoc bibliography from the given citation items. Calling
* this method is rather expensive as it initializes the CSL processor.
* If you need to create bibliographies multiple times in your application
* you should create the processor yourself and cache it if necessary.
* @param style the citation style to use. May either be a serialized
* XML representation of the style or a style's name such as <code>ieee</code>.
* In the latter case, the processor loads the style from the classpath (e.g.
* <code>/ieee.csl</code>)
     * @param outputFormat the processor's output format (one of
     * <code>"html"</code> or <code>"text"</code>)
* @param items the citation items to add to the bibliography
* @return the bibliography
     * @throws IOException if the CSL style could not be loaded
*/
public static Bibliography makeAdhocBibliography(String style, String outputFormat,
CSLItemData... items) throws IOException {
ItemDataProvider provider = new ListItemDataProvider(items);
CSL csl = new CSL(provider, style);
csl.setOutputFormat(outputFormat);
String[] ids = new String[items.length];
for (int i = 0; i < items.length; ++i) {
ids[i] = items[i].getId();
}
csl.registerCitationItems(ids);
return csl.makeBibliography();
}
/**
* Test if any of the attributes of {@code b} match the ones of {@code a}.
* Note: This method will be deprecated in the next release
* @param a the first object
* @param b the object to match against
* @return {@code true} if the match succeeds
*/
private static boolean itemDataEqualsAny(CSLItemData a, CSLItemData b) {
if (a == b) {
return true;
}
if (b == null) {
return false;
}
if (b.getId() != null && Objects.equals(a.getId(), b.getId())) {
return true;
}
if (b.getType() != null && Objects.equals(a.getType(), b.getType())) {
return true;
}
if (b.getCategories() != null && Arrays.equals(a.getCategories(), b.getCategories())) {
return true;
}
if (b.getLanguage() != null && Objects.equals(a.getLanguage(), b.getLanguage())) {
return true;
}
if (b.getJournalAbbreviation() != null && Objects.equals(a.getJournalAbbreviation(), b.getJournalAbbreviation())) {
return true;
}
if (b.getShortTitle() != null && Objects.equals(a.getShortTitle(), b.getShortTitle())) {
return true;
}
if (b.getAuthor() != null && Arrays.equals(a.getAuthor(), b.getAuthor())) {
return true;
}
if (b.getCollectionEditor() != null && Arrays.equals(a.getCollectionEditor(), b.getCollectionEditor())) {
return true;
}
if (b.getComposer() != null && Arrays.equals(a.getComposer(), b.getComposer())) {
return true;
}
if (b.getContainerAuthor() != null && Arrays.equals(a.getContainerAuthor(), b.getContainerAuthor())) {
return true;
}
if (b.getDirector() != null && Arrays.equals(a.getDirector(), b.getDirector())) {
return true;
}
if (b.getEditor() != null && Arrays.equals(a.getEditor(), b.getEditor())) {
return true;
}
if (b.getEditorialDirector() != null && Arrays.equals(a.getEditorialDirector(), b.getEditorialDirector())) {
return true;
}
if (b.getInterviewer() != null && Arrays.equals(a.getInterviewer(), b.getInterviewer())) {
return true;
}
if (b.getIllustrator() != null && Arrays.equals(a.getIllustrator(), b.getIllustrator())) {
return true;
}
if (b.getOriginalAuthor() != null && Arrays.equals(a.getOriginalAuthor(), b.getOriginalAuthor())) {
return true;
}
if (b.getRecipient() != null && Arrays.equals(a.getRecipient(), b.getRecipient())) {
return true;
}
if (b.getReviewedAuthor() != null && Arrays.equals(a.getReviewedAuthor(), b.getReviewedAuthor())) {
return true;
}
if (b.getTranslator() != null && Arrays.equals(a.getTranslator(), b.getTranslator())) {
return true;
}
if (b.getAccessed() != null && Objects.equals(a.getAccessed(), b.getAccessed())) {
return true;
}
if (b.getContainer() != null && Objects.equals(a.getContainer(), b.getContainer())) {
return true;
}
if (b.getEventDate() != null && Objects.equals(a.getEventDate(), b.getEventDate())) {
return true;
}
if (b.getIssued() != null && Objects.equals(a.getIssued(), b.getIssued())) {
return true;
}
if (b.getOriginalDate() != null && Objects.equals(a.getOriginalDate(), b.getOriginalDate())) {
return true;
}
if (b.getSubmitted() != null && Objects.equals(a.getSubmitted(), b.getSubmitted())) {
return true;
}
if (b.getAbstrct() != null && Objects.equals(a.getAbstrct(), b.getAbstrct())) {
return true;
}
if (b.getAnnote() != null && Objects.equals(a.getAnnote(), b.getAnnote())) {
return true;
}
if (b.getArchive() != null && Objects.equals(a.getArchive(), b.getArchive())) {
return true;
}
if (b.getArchiveLocation() != null && Objects.equals(a.getArchiveLocation(), b.getArchiveLocation())) {
return true;
}
if (b.getArchivePlace() != null && Objects.equals(a.getArchivePlace(), b.getArchivePlace())) {
return true;
}
if (b.getAuthority() != null && Objects.equals(a.getAuthority(), b.getAuthority())) {
return true;
}
if (b.getCallNumber() != null && Objects.equals(a.getCallNumber(), b.getCallNumber())) {
return true;
}
if (b.getChapterNumber() != null && Objects.equals(a.getChapterNumber(), b.getChapterNumber())) {
return true;
}
if (b.getCitationNumber() != null && Objects.equals(a.getCitationNumber(), b.getCitationNumber())) {
return true;
}
if (b.getCitationLabel() != null && Objects.equals(a.getCitationLabel(), b.getCitationLabel())) {
return true;
}
if (b.getCollectionNumber() != null && Objects.equals(a.getCollectionNumber(), b.getCollectionNumber())) {
return true;
}
if (b.getCollectionTitle() != null && Objects.equals(a.getCollectionTitle(), b.getCollectionTitle())) {
return true;
}
if (b.getContainerTitle() != null && Objects.equals(a.getContainerTitle(), b.getContainerTitle())) {
return true;
}
if (b.getContainerTitleShort() != null && Objects.equals(a.getContainerTitleShort(), b.getContainerTitleShort())) {
return true;
}
if (b.getDimensions() != null && Objects.equals(a.getDimensions(), b.getDimensions())) {
return true;
}
if (b.getDOI() != null && Objects.equals(a.getDOI(), b.getDOI())) {
return true;
}
if (b.getEdition() != null && Objects.equals(a.getEdition(), b.getEdition())) {
return true;
}
if (b.getEvent() != null && Objects.equals(a.getEvent(), b.getEvent())) {
return true;
}
if (b.getEventPlace() != null && Objects.equals(a.getEventPlace(), b.getEventPlace())) {
return true;
}
if (b.getFirstReferenceNoteNumber() != null && Objects.equals(a.getFirstReferenceNoteNumber(), b.getFirstReferenceNoteNumber())) {
return true;
}
if (b.getGenre() != null && Objects.equals(a.getGenre(), b.getGenre())) {
return true;
}
if (b.getISBN() != null && Objects.equals(a.getISBN(), b.getISBN())) {
return true;
}
if (b.getISSN() != null && Objects.equals(a.getISSN(), b.getISSN())) {
return true;
}
if (b.getIssue() != null && Objects.equals(a.getIssue(), b.getIssue())) {
return true;
}
if (b.getJurisdiction() != null && Objects.equals(a.getJurisdiction(), b.getJurisdiction())) {
return true;
}
if (b.getKeyword() != null && Objects.equals(a.getKeyword(), b.getKeyword())) {
return true;
}
if (b.getLocator() != null && Objects.equals(a.getLocator(), b.getLocator())) {
return true;
}
if (b.getMedium() != null && Objects.equals(a.getMedium(), b.getMedium())) {
return true;
}
if (b.getNote() != null && Objects.equals(a.getNote(), b.getNote())) {
return true;
}
if (b.getNumber() != null && Objects.equals(a.getNumber(), b.getNumber())) {
return true;
}
if (b.getNumberOfPages() != null && Objects.equals(a.getNumberOfPages(), b.getNumberOfPages())) {
return true;
}
if (b.getNumberOfVolumes() != null && Objects.equals(a.getNumberOfVolumes(), b.getNumberOfVolumes())) {
return true;
}
if (b.getOriginalPublisher() != null && Objects.equals(a.getOriginalPublisher(), b.getOriginalPublisher())) {
return true;
}
if (b.getOriginalPublisherPlace() != null && Objects.equals(a.getOriginalPublisherPlace(), b.getOriginalPublisherPlace())) {
return true;
}
if (b.getOriginalTitle() != null && Objects.equals(a.getOriginalTitle(), b.getOriginalTitle())) {
return true;
}
if (b.getPage() != null && Objects.equals(a.getPage(), b.getPage())) {
return true;
}
if (b.getPageFirst() != null && Objects.equals(a.getPageFirst(), b.getPageFirst())) {
return true;
}
if (b.getPMCID() != null && Objects.equals(a.getPMCID(), b.getPMCID())) {
return true;
}
if (b.getPMID() != null && Objects.equals(a.getPMID(), b.getPMID())) {
return true;
}
if (b.getPublisher() != null && Objects.equals(a.getPublisher(), b.getPublisher())) {
return true;
}
if (b.getPublisherPlace() != null && Objects.equals(a.getPublisherPlace(), b.getPublisherPlace())) {
return true;
}
if (b.getReferences() != null && Objects.equals(a.getReferences(), b.getReferences())) {
return true;
}
if (b.getReviewedTitle() != null && Objects.equals(a.getReviewedTitle(), b.getReviewedTitle())) {
return true;
}
if (b.getScale() != null && Objects.equals(a.getScale(), b.getScale())) {
return true;
}
if (b.getSection() != null && Objects.equals(a.getSection(), b.getSection())) {
return true;
}
if (b.getSource() != null && Objects.equals(a.getSource(), b.getSource())) {
return true;
}
if (b.getStatus() != null && Objects.equals(a.getStatus(), b.getStatus())) {
return true;
}
if (b.getTitle() != null && Objects.equals(a.getTitle(), b.getTitle())) {
return true;
}
if (b.getTitleShort() != null && Objects.equals(a.getTitleShort(), b.getTitleShort())) {
return true;
}
if (b.getURL() != null && Objects.equals(a.getURL(), b.getURL())) {
return true;
}
if (b.getVersion() != null && Objects.equals(a.getVersion(), b.getVersion())) {
return true;
}
if (b.getVolume() != null && Objects.equals(a.getVolume(), b.getVolume())) {
return true;
}
return b.getYearSuffix() != null && Objects.equals(a.getYearSuffix(), b.getYearSuffix());
}
}
|
package com.github.sdorra.buildfrontend;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;
import org.zeroturnaround.exec.ProcessResult;
import java.io.File;
import java.util.List;
import java.util.Map;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.when;
@RunWith(MockitoJUnitRunner.class)
public class NodeTest {
private static final String EXECUTABLE = String.format("parent%snode", File.separator);
@Rule
public ExpectedException expectedException = ExpectedException.none();
@Mock
private ProcessResult result;
@Test
public void testExecute() {
CapturingNode node = new CapturingNode("work", EXECUTABLE);
node.execute("b", "c");
int i = 0;
assertEquals(EXECUTABLE, node.cmds.get(i++));
assertEquals("b", node.cmds.get(i++));
assertEquals("c", node.cmds.get(i++));
assertTrue(node.env.get("PATH").startsWith("parent"));
}
@Test
public void testWithNonZeroExitValue() {
expectedException.expect(RuntimeException.class);
expectedException.expectMessage("exit value 2");
when(result.getExitValue()).thenReturn(2);
CapturingNode node = new CapturingNode("work", EXECUTABLE);
node.execute("b", "c");
}
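    /**
     * Node subclass used for testing: instead of spawning a process it captures the environment
     * and command line and returns the mocked {@link ProcessResult}.
     */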
public class CapturingNode extends Node {
private Map<String,String> env;
private List<String> cmds;
CapturingNode(String workingDirectory, String executable) {
super(new File(workingDirectory), new File(executable));
}
@Override
protected ProcessResult execute(Map<String,String> env, List<String> cmds) {
this.env = env;
this.cmds = cmds;
return result;
}
}
}
|
package com.bvb.spring.jms.listener;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import org.springframework.jms.listener.DefaultMessageListenerContainer;
import org.springframework.scheduling.concurrent.ThreadPoolTaskScheduler;
import com.bvb.spring.jms.listener.config.PauseConfig;
import com.bvb.spring.jms.listener.exception.PauseConsumptionException;
import com.bvb.spring.jms.listener.keepalive.KeepAliveManager;
import com.bvb.spring.jms.listener.keepalive.KeepAliveResponse;
import com.bvb.spring.jms.listener.keepalive.KeepAliveService;
import com.bvb.spring.jms.listener.throttler.FixedRateThrottlerCounter;
import com.bvb.spring.jms.listener.utils.DateUtils;
import com.google.common.base.Preconditions;
/**
 * Extends the {@link DefaultMessageListenerContainer} (DMLC) so that users can provide a keep
 * alive timer that tests whether some aspect that the listener requires is still valid and active. The container can
 * be configured to start with no consumption occurring until the keep alive succeeds. Clients of the BDMLC can choose
 * to implement the keep alive service, which lets them specify how long to wait before the next keep alive call
 * and whether the container may now start or must stop.
 * On top of these features the BDMLC can also receive a specific runtime exception thrown by the listener, which allows
 * it to throttle the consumption of messages into the gateway. Due to the design of the DMLC it has not
 * been possible to add code that would throttle consumption at a fixed rate.
*/
public class BackoffDefaultMessageListeningContainer extends DefaultMessageListenerContainer
{
private static final int KEEP_ALIVE_THEAD_COUNT = 2;
public static long MIN_THROTTLE_TIME_MS = TimeUnit.MINUTES.toMillis(2);
public static long THROTTLE_LESSEN_PERIOD_MS = TimeUnit.SECONDS.toMillis(60);
private boolean initiallyNotRunning = false;
private Object throttlingLock = new Object();
private Object taskSchedulerLock = new Object();
private KeepAliveService keepAliveService;
private ThreadPoolTaskScheduler scheduler;
private KeepAliveManager keepAliveManager;
private long keepAliveIntervalMs = TimeUnit.SECONDS.toMillis(15);
private ScheduledFuture<?> taskThrottleRelease;
private int actualFullconcurrentConsumers;
private int actualFullmaxConcurrentConsumers;
private volatile boolean stoppingFromExternalCall = false;
private List<DmlcStartObserver> observers = new ArrayList<DmlcStartObserver>();
/**
* Returns whether the DMLC is set to initially not start consuming messages until the keep alive returns success.
* @return {@code true} if initially we consume nothing on startup, otherwise {@code false}.
*/
public boolean isInitiallyStopped()
{
return initiallyNotRunning;
}
/**
* Set whether the DMLC is set to initially not start consuming messages until a keep alive returns success. This
* prevents messages being consumed until the connection is open.
* @param initiallyStopped {@code true} to not start until the keep alive succeeds. {@code false} to behave as a
* normal DMLC and start immediately.
*/
public void setInitiallyStopped(boolean initiallyStopped)
{
this.initiallyNotRunning = initiallyStopped;
}
/**
* Set the keep alive service. This service will be called based on the keep alive interval to identify whether the
* connection or upstream services are active. If they are not then consumption of messages can be throttled or stopped.
* @param keepAliveService the service to use.
*/
public void setKeepAliveService(KeepAliveService keepAliveService)
{
this.keepAliveService = keepAliveService;
}
@Override
public void start()
{
stoppingFromExternalCall = false;
if (!initiallyNotRunning)
{
startDmlc();
}
// start the keep alive thread and build the task scheduler, if not already done.
startKeepAliveAndBuildTaskScheduler();
}
@Override
public void stop()
{
doTaskStop();
super.stop();
}
@Override
public void shutdown()
{
doTaskStop();
stopTaskScheduler();
super.shutdown();
}
private void doTaskStop()
{
stoppingFromExternalCall = true;
if (keepAliveManager != null)
{
keepAliveManager.stop();
}
cancelThrottleTask();
notifyObserversStop(null);
}
private void notifyObserversStop(PauseConfig config)
{
for (DmlcStartObserver o : observers)
{
if (config == null)
{
o.stopped();
}
else
{
o.stopped(config);
}
}
}
private void notifyObserversRunning()
{
for (DmlcStartObserver o : observers)
{
o.running();
}
}
/**
* Add an observer who will get notified when the BDMLC stops or starts.
* @param observer the observer to add.
* @throws NullPointerException if the observer is null.
*/
public void setObserver(DmlcStartObserver observer)
{
registerObserver(observer);
}
/**
* Add an observer who will get notified when the BDMLC stops or starts.
* @param observer the observer to add.
* @throws NullPointerException if the observer is null.
*/
public void registerObserver(DmlcStartObserver observer)
{
Preconditions.checkNotNull(observer);
observers.add(observer);
}
/**
* Remove an observer.
* @param observer the observer to remove.
*/
public void unRegisterObserver(DmlcStartObserver observer)
{
observers.remove(observer);
}
@Override
public void stop(Runnable callback)
{
doTaskStop();
super.stop(callback);
}
@Override
public void setConcurrentConsumers(int concurrentConsumers)
{
super.setConcurrentConsumers(concurrentConsumers);
updateConcurrency();
}
@Override
public void setConcurrency(String concurrency)
{
super.setConcurrency(concurrency);
updateConcurrency();
}
@Override
public void setMaxConcurrentConsumers(int count)
{
super.setMaxConcurrentConsumers(count);
updateConcurrency();
}
@Override
protected void handleListenerException(Throwable ex)
{
// if this is an exception indicating some back-off or keep alive is required
if (ex.getCause() instanceof PauseConsumptionException)
{
handlePauseConsumptionException((PauseConsumptionException) ex.getCause());
}
super.handleListenerException(ex);
}
protected void handlePauseConsumptionException(PauseConsumptionException ex)
{
// Stop the DMLC and throttle the consumption if required
stopDmlc(ex.getConfig());
logger.warn(String.format("Gateway->Stopped. Gateway message listener returned PauseConsumption with config: [%s]",
ex.getConfig().toString()));
}
protected void setThrottledMaxConcurrentConsumers(int count)
{
super.setMaxConcurrentConsumers(count);
}
protected void setThrottledConcurrentConsumers(int count)
{
super.setConcurrentConsumers(count);
}
protected void updateConcurrency()
{
actualFullconcurrentConsumers = getConcurrentConsumers();
actualFullmaxConcurrentConsumers = getMaxConcurrentConsumers();
}
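    /**
     * Starts the underlying DMLC if it is not already running and notifies all registered
     * observers that the container is running.
     */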
protected void startDmlc()
{
if (!isRunning())
{
super.start();
notifyObserversRunning();
}
}
protected void stopDmlc(PauseConfig config)
{
if (isRunning())
{
super.stop();
notifyObserversStop(config);
}
if (config != null)
{
// throttle if returned in the response
throttleConsumers(config);
// set the next keep alive, if asked to
keepAliveManager.rescheduleAlways(config.getDelayConsumptionForMs());
}
}
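    /**
     * Reduces the (maximum) number of concurrent consumers to the throttled value derived from the
     * pause configuration and schedules a task that gradually relaxes the throttling again.
     */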
private void throttleConsumers(PauseConfig config)
{
if (config.isThrottled())
{
synchronized (throttlingLock)
{
int currentMax = getMaxConcurrentConsumers();
PauseConfig configToUse = new PauseConfig(currentMax, config);
int maxConsumers = configToUse.getThrottleMaxConcurrent();
// start a timer to release the throttling after a period of time
startThrottlingReleaseTask(configToUse);
int currentConcurrent = getConcurrentConsumers();
if (currentConcurrent > maxConsumers)
{
setThrottledConcurrentConsumers(maxConsumers);
}
setThrottledMaxConcurrentConsumers(maxConsumers);
}
}
}
/*
* Needs to be synchronized with the throttling lock object.
*/
private void startThrottlingReleaseTask(PauseConfig config)
{
cancelThrottleTask();
scheduleThrottlingRelaxTask(config);
}
private void scheduleThrottlingRelaxTask(PauseConfig config)
{
// When stopping do not reschedule any tasks
if (!stoppingFromExternalCall)
{
// schedule another task
int target = actualFullmaxConcurrentConsumers;
FixedRateThrottlerCounter counter = new FixedRateThrottlerCounter(target, config);
// schedule to run in the future when the first throttling relax interval occurs
taskThrottleRelease =
scheduler.scheduleWithFixedDelay(new ThrottlingRelaxerRunnable(counter),
DateUtils.getNowPlusMs(config.getThrottleRelaxIntervalMs()), THROTTLE_LESSEN_PERIOD_MS);
}
}
private void cancelThrottleTask()
{
synchronized(throttlingLock)
{
if (taskThrottleRelease != null)
{
// cancel existing task
taskThrottleRelease.cancel(false);
taskThrottleRelease = null;
}
}
}
private void startKeepAliveAndBuildTaskScheduler()
{
synchronized (taskSchedulerLock)
{
if (scheduler == null)
{
this.scheduler = buildScheduler(KEEP_ALIVE_THEAD_COUNT);
keepAliveManager = new KeepAliveManager(scheduler, new KeepAliveRunnable(), keepAliveIntervalMs);
}
keepAliveManager.start();
}
}
private synchronized void stopTaskScheduler()
{
synchronized (taskSchedulerLock)
{
if (scheduler != null)
{
scheduler.shutdown();
scheduler = null;
}
}
}
private ThreadPoolTaskScheduler buildScheduler(int poolSize)
{
ThreadPoolTaskScheduler scheduler = new ThreadPoolTaskScheduler();
scheduler.setThreadNamePrefix(this.getBeanName() + "-KeepAlive");
scheduler.setPoolSize(poolSize);
scheduler.initialize();
return scheduler;
}
/**
     * Get the keep alive interval in milliseconds. This is the interval between keep alive runs, where the keep alive
* service returns whether the gateway can continue with normal operation.
* @return the keep alive interval.
*/
public long getKeepAliveInterval()
{
if (keepAliveManager != null)
{
return keepAliveManager.getKeepAliveInterval();
}
else
{
return keepAliveIntervalMs;
}
}
/**
* Set the keep alive interval in milliseconds.
* @param keepAliveIntervalMs the interval to set in milliseconds.
*/
public void setKeepAliveInterval(long keepAliveIntervalMs)
{
this.keepAliveIntervalMs = keepAliveIntervalMs;
}
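// Illustrative configuration sketch (the variable name "container" and the 30s
// value are assumptions, not taken from this code base):
//   container.setKeepAliveInterval(30000L); // poll the keep alive service every 30 seconds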
class KeepAliveRunnable implements Runnable
{
@Override
public void run()
{
// default to always starting the DMLC if there is no keep alive service defined
boolean start = true;
if (keepAliveService != null)
{
start = processByService();
}
// if we want to start the DMLC
if (start)
{
startDmlc();
}
}
private boolean processByService()
{
KeepAliveResponse response = keepAliveService.keepAlive();
if (response == null)
{
logger.warn("Keep alive service returned null which is not permitted, ignoring result");
}
else if (response.isSuccess())
{
// only restart the scheduling if the new value is different from the old
keepAliveManager.rescheduleIfDifferent(response.getPauseConfig().getDelayConsumptionForMs());
return true;
}
else
{
logger.warn(String.format("Keep alive service return failure, stopping message consumption. Using settings "
+ "from KeepAliveResponse: [%s]", response.toString()));
stopDmlc(response.getPauseConfig());
}
return false;
}
}
public class ThrottlingRelaxerRunnable implements Runnable
{
private final FixedRateThrottlerCounter counter;
public ThrottlingRelaxerRunnable(FixedRateThrottlerCounter counter)
{
this.counter = counter;
}
@Override
public void run()
{
// if we are not running then don't increase any concurrent consumers
if (!isRunning())
{
logger.trace("Throttling relaxer running, but DMLC is not running");
return;
}
int newMax = counter.incrementAndGet();
int current = getMaxConcurrentConsumers();
boolean done = false;
if (newMax > current)
{
logger.info(String.format("Relaxing throttling, growing max from: [%s] to [%s]", current, newMax));
setThrottledConcurrentConsumers(newMax);
}
else
{
logger.info(String.format("Throttler relaxer running but max concurrent consumers: [%s], then new value: [%s]",
current, newMax));
done = true;
}
// check if we are done
if (done || counter.isDone())
{
int max = (newMax > current) ? newMax : current;
logger.info(String.format("Cancelling Throttler relaxer task as old max re-instated or passed: [%s]", max));
cancelThrottleTask();
}
}
}
}
|
package net.oneandone.stool.client;
import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
import net.oneandone.inline.ArgumentException;
import net.oneandone.sushi.fs.FileNotFoundException;
import net.oneandone.sushi.fs.NodeInstantiationException;
import net.oneandone.sushi.fs.World;
import net.oneandone.sushi.fs.file.FileNode;
import net.oneandone.sushi.fs.http.HttpFilesystem;
import net.oneandone.sushi.fs.http.HttpNode;
import net.oneandone.sushi.fs.http.StatusException;
import net.oneandone.sushi.fs.http.model.Body;
import net.oneandone.sushi.fs.http.model.Method;
import net.oneandone.sushi.util.Separator;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
public class Client {
/** @param token null to work anonymously */
public static Client token(World world, String name, String url, FileNode wireLog, String clientInvocation, String clientCommand,
String token) throws NodeInstantiationException {
return doCreate(world, name, url, wireLog, clientInvocation, clientCommand, token, null, null);
}
public static Client basicAuth(World world, String name, String url, FileNode wireLog, String clientInvocation, String clientCommand,
String username, String password) throws NodeInstantiationException {
return doCreate(world, name, url, wireLog, clientInvocation, clientCommand, null, username, password);
}
private static Client doCreate(World world, String name, String url, FileNode wireLog, String clientInvocation, String clientCommand,
String token, String username, String password) throws NodeInstantiationException {
HttpNode node;
if (wireLog != null) {
HttpFilesystem.wireLog(wireLog.getAbsolute());
}
node = (HttpNode) world.validNode(url);
node.getRoot().addExtraHeader("X-stool-client-invocation", clientInvocation);
node.getRoot().addExtraHeader("X-stool-client-command", clientCommand);
if (token != null) {
node.getRoot().addExtraHeader("X-authentication", token);
}
if (username != null) {
node.getRoot().setCredentials(username, password);
}
return new Client(name, node);
}
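// Illustrative usage sketch (the World factory call, URL and command strings
// below are assumptions chosen for the example, not part of this class's contract):
//   World world = World.create();
//   Client client = Client.token(world, "local", "http://localhost:8080/api",
//           null /* no wire log */, "stool", "list", null /* anonymous */);
//   List<String> stages = client.list(null); // null filter => all stages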
private final String name;
private final HttpNode root;
private final JsonParser parser;
public Client(String name, HttpNode root) {
this.name = name;
this.root = root;
this.parser = new JsonParser();
}
public String getName() {
return name;
}
public String auth() throws IOException {
HttpNode node;
node = node("auth");
try {
return postJson(node, "").getAsString();
} catch (StatusException e) {
throw beautify(node, e);
}
}
/** @param filter null to return all stages */
public List<String> list(String filter) throws IOException {
HttpNode node;
JsonArray references;
List<String> result;
node = node("stages");
if (filter != null) {
node = node.withParameter("filter", filter);
}
references = getJson(node).getAsJsonArray();
result = new ArrayList<>(references.size());
for (JsonElement element : references) {
result.add(element.getAsString());
}
return result;
}
//-- create, build, start, stop, remove
public void create(String name, Map<String, String> config) throws IOException {
HttpNode node;
node = node("stages/" + name);
node = node.withParameters(config);
postEmpty(node, "");
}
public BuildResult build(String stage, String app, FileNode war, String comment,
String origin, String createdBy, String createdOn, boolean noCache, int keep,
Map<String, String> arguments) throws Exception {
HttpNode node;
JsonObject obj;
JsonElement error;
String result;
node = node(stage, "build");
node = node.withParameter("app", app);
node = node.withParameter("war", war.getAbsolute());
node = node.withParameter("comment", comment);
node = node.withParameter("origin", origin);
node = node.withParameter("created-by", createdBy);
node = node.withParameter("created-on", createdOn);
node = node.withParameter("no-cache", noCache);
node = node.withParameter("keep", keep);
node = node.withParameters("arg.", arguments);
try (InputStream src = war.newInputStream()) {
result = node.getWorld().getSettings().string(node.post(new Body(null, null, war.size(), src, false)));
}
obj = parser.parse(result).getAsJsonObject();
error = obj.get("error");
return new BuildResult(error == null ? null : error.getAsString(), obj.get("output").getAsString());
}
public List<String> start(String stage, int http, int https, Map<String, String> startEnvironment, Map<String, Integer> apps) throws IOException {
HttpNode node;
List<String> started;
node = node(stage, "start");
node = node.withParameter("http", http);
node = node.withParameter("https", https);
node = node.withParameters("env.", startEnvironment);
node = node.withParameters("app.", apps);
try {
started = array(postJson(node, "").getAsJsonArray());
} catch (StatusException e) {
throw beautify(node, e);
}
if (started.isEmpty()) {
throw new IOException("stage is already started");
}
return started;
}
public Map<String, List<String>> awaitStartup(String stage) throws IOException {
JsonObject response;
Map<String, List<String>> result;
response = getJson(node(stage, "await-startup")).getAsJsonObject();
result = new LinkedHashMap<>();
for (Map.Entry<String, JsonElement> entry : response.entrySet()) {
result.put(entry.getKey(), array(entry.getValue().getAsJsonArray()));
}
return result;
}
private static List<String> array(JsonArray json) {
List<String> result;
result = new ArrayList<>(json.size());
for (JsonElement element : json) {
result.add(element.getAsString());
}
return result;
}
public List<String> stop(String stage, List<String> apps) throws IOException {
List<String> stopped;
HttpNode node;
node = node(stage, "stop").withParameter("apps", Separator.COMMA.join(apps));
try {
stopped = array(postJson(node, "").getAsJsonArray());
} catch (StatusException e) {
throw beautify(node, e);
}
if (stopped.isEmpty()) {
throw new IOException("stage is already stopped");
}
return stopped;
}
public void remove(String stage) throws IOException {
postEmpty(node(stage, "remove"), "");
}
public Map<String, String> status(String stage, List<String> select) throws IOException {
HttpNode node;
JsonObject status;
LinkedHashMap<String, String> result;
node = node(stage, "status");
node = node.withParameter("select", Separator.COMMA.join(select));
status = getJson(node).getAsJsonObject();
result = new LinkedHashMap<>();
for (String name : status.keySet()) {
result.put(name, status.get(name).getAsString());
}
if (result.containsKey("name")) {
result.replace("name", new Reference(this, stage).toString());
}
return result;
}
public List<String> history(String stage, boolean details, int max) throws IOException {
HttpNode node;
JsonArray references;
List<String> result;
node = node(stage,"history");
node = node.withParameter("details", details);
node = node.withParameter("max", max);
references = getJson(node).getAsJsonArray();
result = new ArrayList<>(references.size());
for (JsonElement element : references) {
result.add(element.getAsString());
}
return result;
}
public String quota() throws IOException {
String result;
result = getJson(node("quota")).getAsString();
return result.isEmpty() ? null : result;
}
public int memUnreserved() throws IOException {
String result;
result = getJson(node("memUnreserved")).getAsString();
return Integer.parseInt(result);
}
public List<String> apps(String stage) throws IOException {
return array(getJson(node(stage, "apps")).getAsJsonArray());
}
//-- validate
public List<String> validate(String stage, boolean email, boolean repair) throws IOException {
HttpNode node;
node = node(stage,"validate");
node = node.withParameter("email", email);
node = node.withParameter("repair", repair);
return array(postJson(node,"").getAsJsonArray());
}
//-- config command
public Map<String, String> getProperties(String stage) throws Exception {
Map<String, String> result;
JsonObject properties;
properties = getJson(node(stage, "properties")).getAsJsonObject();
result = new LinkedHashMap<>();
for (String name : properties.keySet()) {
result.put(name, properties.get(name).getAsString());
}
return result;
}
public Map<String, String> setProperties(String stage, Map<String, String> arguments) throws IOException {
HttpNode node;
JsonObject response;
Map<String, String> result;
node = node(stage, "set-properties");
node = node.withParameters(arguments);
response = postJson(node, "").getAsJsonObject();
result = new LinkedHashMap<>();
for (Map.Entry<String, JsonElement> entry : response.entrySet()) {
result.put(entry.getKey(), entry.getValue().getAsString());
}
return result;
}
//-- app info
public List<String> appInfo(String stage, String app) throws Exception {
return array(getJson(node(stage, "appInfo").withParameter("app", app)).getAsJsonArray());
}
private HttpNode node(String stage, String cmd) {
return node("stages/" + stage + "/" + cmd);
}
private HttpNode node(String path) {
return root.join(path);
}
private IOException beautify(HttpNode node, StatusException e) {
byte[] body;
switch (e.getStatusLine().code) {
case 400:
case 404:
body = e.getResponseBytes();
// feels ugly ...
throw new ArgumentException(body == null ? e.getMessage() : node.getWorld().getSettings().string(body), e);
case 401:
return new IOException("401 unauthenticated - " + node.getUri(), e);
default:
return e;
}
}
private void postEmpty(HttpNode node, String body) throws IOException {
String response;
response = node.post(body);
if (!response.isEmpty()) {
throw new IOException("unexpected response: " + response);
}
}
private JsonElement postJson(HttpNode node, String body) throws IOException {
return parser.parse(node.post(body));
}
private JsonElement getJson(HttpNode node) throws IOException {
// directly invoke get because I don't want wrapper exception from Node.newInputStream or newReader
try (InputStream src = Method.get(node)) {
return parser.parse(new InputStreamReader(src));
} catch (FileNotFoundException e) {
if (e.getCause() instanceof StatusException) {
throw beautify(node, (StatusException) e.getCause());
} else {
throw e;
}
}
}
}
|
package com.j256.simplemagic.types;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import org.junit.Test;
import com.j256.simplemagic.entries.MagicFormatter;
import com.j256.simplemagic.entries.MagicMatcher.MutableOffset;
public class RegexTypeTest {
@Test
public void testBasic() {
RegexType type = new RegexType();
Object test = type.convertTestString("regex", "hello[abc]");
byte[] bytes = "some line with helloa in it".getBytes();
Object extracted = type.isMatch(test, null, false, null, new MutableOffset(0), bytes);
assertNotNull(extracted);
assertEquals("helloa", renderValue(extracted, type, new MagicFormatter("%s")));
}
@Test
public void testCaseInsensitive() {
RegexType type = new RegexType();
Object test = type.convertTestString("regex/c", "hello[ABC]");
byte[] bytes = "some line with helloa in it".getBytes();
Object extracted = type.isMatch(test, null, false, null, new MutableOffset(0), bytes);
assertNotNull(extracted);
assertEquals("helloa", renderValue(extracted, type, new MagicFormatter("%s")));
}
@Test
public void testSlashes() {
RegexType type = new RegexType();
/*
* \\xB is decimal 11 which is octal 013. The 8 backslashes don't seem right but if the Java string does one
* level of \, magic file does another, then you need 2 x 2 x 2 == 8.
*/
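/*
 * Worked escaping trace for the pattern below (one reading of the "2 x 2 x 2"
 * remark above): the 8 backslashes written in the Java source become 4
 * backslash characters in the runtime String, the magic-file escape handling
 * reduces those 4 to 2, and the regex engine finally treats "\\" as a single
 * literal backslash to match.
 */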
Object test = type.convertTestString("regex", "hrm\\t\\0\\xB\\\\\\\\wow");
byte[] bytes = "some line with hrm\t\0\13\\wow in it".getBytes();
Object extracted = type.isMatch(test, null, false, null, new MutableOffset(0), bytes);
assertNotNull(extracted);
assertEquals("hrm\t\0\13\\wow", renderValue(extracted, type, new MagicFormatter("%s")));
}
@Test
public void testExtractValueFromBytes() {
new RegexType().extractValueFromBytes(0, null, true);
}
private String renderValue(Object extracted, RegexType type, MagicFormatter formatter) {
StringBuilder sb = new StringBuilder();
type.renderValue(sb, extracted, formatter);
return sb.toString();
}
}
|
// This file is part of the Whiley-to-Java Compiler (wyjc).
// The Whiley-to-Java Compiler is free software; you can redistribute it
// and/or modify it under the terms of the GNU General Public License.
// The Whiley-to-Java Compiler is distributed in the hope that it will be
// useful, but WITHOUT ANY WARRANTY.
// You should have received a copy of the GNU General Public License along with it.
package wyil.lang;
import java.io.PrintStream;
import java.util.*;
import wyil.util.Pair;
import wyjvm.lang.Constant;
public abstract class Type {
// Type Constructors
public static final Any T_ANY = new Any();
public static final Void T_VOID = new Void();
public static final Null T_NULL = new Null();
public static final Bool T_BOOL = new Bool();
public static final Int T_INT = new Int();
public static final Real T_REAL = new Real();
public static final Meta T_META = new Meta();
/**
* Construct a tuple type using the given element types.
*
 * @param elements
*/
public static final Tuple T_TUPLE(Type... elements) {
int len = 1;
for(Type b : elements) {
// could be optimised slightly
len += nodes(b).length;
}
Node[] nodes = new Node[len];
int[] children = new int[elements.length];
int start = 1;
for(int i=0;i!=elements.length;++i) {
children[i] = start;
Node[] comps = nodes(elements[i]);
insertNodes(start,comps,nodes);
start += comps.length;
}
nodes[0] = new Node(K_TUPLE, children);
return new Tuple(nodes);
}
/**
* Construct a process type using the given element type.
*
* @param element
*/
public static final Process T_PROCESS(Type element) {
if (element instanceof Leaf) {
return new Process(new Node[] { new Node(K_PROCESS, 1),
new Node(leafKind((Leaf) element), null) });
} else {
// Compound type
Node[] nodes = insertComponent(((Compound) element).nodes);
nodes[0] = new Node(K_PROCESS, 1);
return new Process(nodes);
}
}
public static final Existential T_EXISTENTIAL(NameID name) {
if (name == null) {
throw new IllegalArgumentException(
"existential name cannot be null");
}
return new Existential(name);
}
/**
* Construct a set type using the given element type.
*
* @param element
*/
public static final Set T_SET(Type element) {
if (element instanceof Leaf) {
return new Set(new Node[] { new Node(K_SET, 1),
new Node(leafKind((Leaf) element), null) });
} else {
// Compound type
Node[] nodes = insertComponent(((Compound) element).nodes);
nodes[0] = new Node(K_SET, 1);
return new Set(nodes);
}
}
/**
* Construct a list type using the given element type.
*
* @param element
*/
public static final List T_LIST(Type element) {
if (element instanceof Leaf) {
return new List(new Node[] { new Node(K_LIST, 1),
new Node(leafKind((Leaf) element), null) });
} else {
// Compound type
Node[] nodes = insertComponent(((Compound) element).nodes);
nodes[0] = new Node(K_LIST, 1);
return new List(nodes);
}
}
/**
* Construct a dictionary type using the given key and value types.
*
 * @param key
 * @param value
*/
public static final Dictionary T_DICTIONARY(Type key, Type value) {
Node[] keyComps = nodes(key);
Node[] valueComps = nodes(value);
Node[] nodes = new Node[1 + keyComps.length + valueComps.length];
insertNodes(1,keyComps,nodes);
insertNodes(1+keyComps.length,valueComps,nodes);
nodes[0] = new Node(K_DICTIONARY, new Pair(1,1+keyComps.length));
return new Dictionary(nodes);
}
/**
* Construct a union type using the given type bounds
*
 * @param bounds
*/
public static final Union T_UNION(Collection<Type> bounds) {
Type[] ts = new Type[bounds.size()];
int i = 0;
for(Type t : bounds) {
ts[i++] = t;
}
return T_UNION(ts);
}
/**
* Construct a union type using the given type bounds
*
 * @param bounds
*/
public static final Union T_UNION(Type... bounds) {
// include child unions
int len = 1;
for(Type b : bounds) {
// could be optimised slightly
len += nodes(b).length;
}
Node[] nodes = new Node[len];
int[] children = new int[bounds.length];
int start = 1;
for(int i=0;i!=bounds.length;++i) {
children[i] = start;
Node[] comps = nodes(bounds[i]);
insertNodes(start,comps,nodes);
start += comps.length;
}
nodes[0] = new Node(K_UNION, children);
return new Union(nodes);
}
public static final Fun T_FUN(Process receiver, Type ret,
Collection<Type> params) {
Type[] ts = new Type[params.size()];
int i = 0;
for (Type t : params) {
ts[i++] = t;
}
return T_FUN(receiver, ret, ts);
}
/**
* Construct a function type using the given return and parameter types.
*
 * @param receiver
 * @param ret
 * @param params
*/
public static final Fun T_FUN(Process receiver, Type ret, Type... params) {
Node[] reccomps;
if(receiver != null) {
reccomps = nodes(receiver);
} else {
reccomps = new Node[0];
}
Node[] retcomps = nodes(ret);
int len = 1 + reccomps.length + retcomps.length;
for(Type b : params) {
// could be optimised slightly
len += nodes(b).length;
}
Node[] nodes = new Node[len];
int[] children = new int[2 + params.length];
insertNodes(1,reccomps,nodes);
insertNodes(1+reccomps.length,retcomps,nodes);
children[0] = receiver == null ? -1 : 1;
children[1] = 1 + reccomps.length;
int start = 1 + reccomps.length + retcomps.length;
for(int i=0;i!=params.length;++i) {
children[i+2] = start;
Node[] comps = nodes(params[i]);
insertNodes(start,comps,nodes);
start += comps.length;
}
nodes[0] = new Node(K_FUNCTION, children);
return new Fun(nodes);
}
/**
* Construct a record type using the given fields.
*
 * @param fields
*/
public static final Record T_RECORD(Map<String,Type> fields) {
ArrayList<String> keys = new ArrayList<String>(fields.keySet());
Collections.sort(keys);
int len = 1;
for(int i=0;i!=keys.size();++i) {
String k = keys.get(i);
Type t = fields.get(k);
len += nodes(t).length;
}
Node[] nodes = new Node[len];
Pair<String,Integer>[] children = new Pair[fields.size()];
int start = 1;
for(int i=0;i!=children.length;++i) {
String k = keys.get(i);
children[i] = new Pair<String,Integer>(k,start);
Node[] comps = nodes(fields.get(k));
insertNodes(start,comps,nodes);
start += comps.length;
}
nodes[0] = new Node(K_RECORD,children);
return new Record(nodes);
}
/**
* Construct a label type. These are used in the construction of recursive
* types. Essentially, a label corresponds to the leaf of a recursive type,
* which we can then "close" later on as we build up the type. For example,
* we construct the recursive type <code>X<null|{X next}></code> as follows:
*
* <pre>
* HashMap<String,Type> fields = new HashMap<String,Type>();
* fields.put("next",T_LABEL("X")); *
* Type tmp = T_UNION(T_NULL,T_RECORD(fields));
* Type type = T_RECURSIVE("X",tmp);
* </pre>
*
* <b>NOTE:</b> a type containing a label is not considered valid until it
* is closed using a recursive type.
*
* @param label
* @return
*/
public static final Type T_LABEL(String label) {
return new Compound(new Node[]{new Node(K_LABEL,label)});
}
public static final Type T_RECURSIVE(String label, Type type) {
// first stage, identify all matching labels
if(type instanceof Leaf) { throw new IllegalArgumentException("cannot close a leaf type"); }
Compound compound = (Compound) type;
Node[] nodes = compound.nodes;
int[] rmap = new int[nodes.length];
int nmatches = 0;
for(int i=0;i!=nodes.length;++i) {
Node c = nodes[i];
if(c.kind == K_LABEL && c.data.equals(label)) {
rmap[i] = 0;
nmatches++;
} else {
rmap[i] = i - nmatches;
}
}
if (nmatches == 0) {
throw new IllegalArgumentException(
"type cannot be closed, as it contains no matching labels");
}
Node[] newnodes = new Node[nodes.length-nmatches];
nmatches = 0;
for(int i=0;i!=nodes.length;++i) {
Node c = nodes[i];
if(c.kind == K_LABEL && c.data.equals(label)) {
nmatches++;
} else {
newnodes[i-nmatches] = remap(nodes[i],rmap);
}
}
return construct(newnodes);
}
/**
* The following code converts a "type string" into an actual type. This is
* useful, amongst other things, for debugging.
*
* @param str
* @return
*/
public static Type fromString(String str) {
return new TypeParser(str).parse();
}
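	// Illustrative usage sketch of fromString/TypeParser (results described
	// informally; based only on the parser defined below):
	//   Type t1 = Type.fromString("int|[real]");     // union of int and a list of real
	//   Type t2 = Type.fromString("{int x, int y}"); // record with two int fields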
private static class TypeParser {
private int index;
private String str;
public TypeParser(String str) {
this.str = str;
}
public Type parse() {
Type term = parseTerm();
skipWhiteSpace();
while(index < str.length() && str.charAt(index) == '|') {
// union type
match("|");
term = T_UNION(term,parse());
skipWhiteSpace();
}
return term;
}
public Type parseTerm() {
skipWhiteSpace();
char lookahead = str.charAt(index);
switch (lookahead) {
case 'a':
match("any");
return T_ANY;
case 'v':
match("void");
return T_VOID;
case 'n':
match("null");
return T_NULL;
case 'b':
match("bool");
return T_BOOL;
case 'i':
match("int");
return T_INT;
case 'r':
match("real");
return T_REAL;
case '[':
{
match("[");
Type elem = parse();
match("]");
return T_LIST(elem);
}
case '{':
{
match("{");
Type elem = parse();
skipWhiteSpace();
if(index < str.length() && str.charAt(index) != '}') {
// record
HashMap<String,Type> fields = new HashMap<String,Type>();
String id = parseIdentifier();
fields.put(id, elem);
skipWhiteSpace();
while(index < str.length() && str.charAt(index) == ',') {
match(",");
elem = parse();
id = parseIdentifier();
fields.put(id, elem);
skipWhiteSpace();
}
match("}");
return T_RECORD(fields);
}
match("}");
return T_SET(elem);
}
default:
throw new IllegalArgumentException("invalid type string: "
+ str);
}
}
private String parseIdentifier() {
skipWhiteSpace();
int start = index;
while (index < str.length()
&& Character.isJavaIdentifierPart(str.charAt(index))) {
index++;
}
return str.substring(start,index);
}
private void skipWhiteSpace() {
while (index < str.length()
&& Character.isWhitespace(str.charAt(index))) {
index++;
}
}
private void match(String match) {
skipWhiteSpace();
if ((str.length() - index) < match.length()
|| !str.startsWith(match, index)) {
throw new IllegalArgumentException("invalid type string: "
+ str);
}
index += match.length();
}
}
/**
 * This is a utility helper for constructing types. In particular, it's
 * useful for determining whether or not a type needs to be closed. An open
 * type is one which contains a "dangling" reference to some node which
 * needs to be connected back to form a cycle.
*
* @param label
* @param t
* @return
*/
public static boolean isOpen(String label, Type t) {
if (t instanceof Leaf) {
return false;
}
Compound graph = (Compound) t;
for (Node n : graph.nodes) {
if (n.kind == K_LABEL && n.data.equals(label)) {
return true;
}
}
return false;
}
// Serialisation Helpers
/**
* The Type.Builder interface is essentially a way of separating the
* internals of the type implementation from clients which may want to
* serialise a given type graph.
*/
public interface Builder {
/**
* Set the number of nodes required for the type being built. This
* method is called once, before all other methods are called. The
* intention is to give builders a chance to statically initialise data
* structures based on the number of nodes required.
*
* @param numNodes
*/
void initialise(int numNodes);
void buildPrimitive(int index, Type.Leaf type);
void buildExistential(int index, NameID name);
void buildSet(int index, int element);
void buildList(int index, int element);
void buildProcess(int index, int element);
void buildDictionary(int index, int key, int value);
void buildTuple(int index, int... elements);
void buildRecord(int index, Pair<String, Integer>... fields);
void buildFunction(int index, int receiver, int ret, int... parameters);
void buildUnion(int index, int... bounds);
}
/**
 * This class provides an empty implementation of a type builder, which is
 * useful for defining simple builders.
*
* @author djp
*
*/
public static class AbstractBuilder implements Type.Builder {
public void initialise(int numNodes) {
}
public void buildPrimitive(int index, Type.Leaf type) {
}
public void buildExistential(int index, NameID name) {
}
public void buildSet(int index, int element) {
}
public void buildList(int index, int element) {
}
public void buildProcess(int index, int element) {
}
public void buildDictionary(int index, int key, int value) {
}
public void buildTuple(int index, int... elements) {
}
public void buildRecord(int index, Pair<String, Integer>... fields) {
}
public void buildFunction(int index, int receiver, int ret,
int... parameters) {
}
public void buildUnion(int index, int... bounds) {
}
}
public static class InternalBuilder implements Type.Builder {
private Node[] nodes;
public Type type() {
return construct(nodes);
}
public void initialise(int numNodes) {
nodes = new Node[numNodes];
}
public void buildPrimitive(int index, Type.Leaf type) {
nodes[index] = new Node(leafKind(type),null);
}
public void buildExistential(int index, NameID name) {
if (name == null) {
throw new IllegalArgumentException(
"existential name cannot be null");
}
nodes[index] = new Node(K_EXISTENTIAL,name);
}
public void buildSet(int index, int element) {
nodes[index] = new Node(K_SET,element);
}
public void buildList(int index, int element) {
nodes[index] = new Node(K_LIST,element);
}
public void buildProcess(int index, int element) {
nodes[index] = new Node(K_PROCESS,element);
}
public void buildDictionary(int index, int key, int value) {
nodes[index] = new Node(K_DICTIONARY,new Pair(key,value));
}
public void buildTuple(int index, int... elements) {
nodes[index] = new Node(K_TUPLE,elements);
}
public void buildRecord(int index, Pair<String, Integer>... fields) {
nodes[index] = new Node(K_RECORD,fields);
}
public void buildFunction(int index, int receiver, int ret,
int... parameters) {
int[] items = new int[parameters.length+2];
items[0] = receiver;
items[1] = ret;
System.arraycopy(parameters,0,items,2,parameters.length);
nodes[index] = new Node(K_FUNCTION,items);
}
public void buildUnion(int index, int... bounds) {
nodes[index] = new Node(K_UNION,bounds);
}
}
/**
* The print builder is an example implementation of type builder which
* simply constructs a textual representation of the type in the form of a
* graph.
*/
public static class PrintBuilder implements Builder {
private final PrintStream out;
public PrintBuilder(PrintStream out) {
this.out = out;
}
public void initialise(int numNodes) { }
public void buildPrimitive(int index, Type.Leaf type) {
out.println("#" + index + " = " + type);
}
public void buildExistential(int index, NameID name) {
out.println("#" + index + " = ?" + name);
}
public void buildSet(int index, int element) {
out.println("#" + index + " = {#" + element + "}");
}
public void buildList(int index, int element) {
out.println("#" + index + " = [#" + element + "]");
}
public void buildProcess(int index, int element) {
out.println("#" + index + " = process #" + element);
}
public void buildDictionary(int index, int key, int value) {
out.println("#" + index + " = {#" + key + "->#" + value + "}");
}
public void buildTuple(int index, int... elements) {
out.print("#" + index + " = (");
boolean firstTime=true;
for(int e : elements) {
if(!firstTime) {
out.print(", ");
}
firstTime=false;
out.print("
}
out.println(")");
}
public void buildRecord(int index, Pair<String, Integer>... fields) {
out.print("#" + index + " = {");
boolean firstTime=true;
for(Pair<String,Integer> e : fields) {
if(!firstTime) {
out.print(", ");
}
firstTime=false;
out.print("#" + e.second() + " " + e.first());
}
out.println("}");
}
public void buildFunction(int index, int receiver, int ret, int... parameters) {
out.print("#" + index + " = ");
if(receiver != -1) {
out.print("#" + receiver + "::");
}
out.print("#" + ret + "(");
boolean firstTime=true;
for(int e : parameters) {
if(!firstTime) {
out.print(", ");
}
firstTime=false;
out.print("
}
out.println(")");
}
public void buildUnion(int index, int... bounds) {
out.print("#" + index + " = ");
boolean firstTime=true;
for(int e : bounds) {
if(!firstTime) {
out.print(" | ");
}
firstTime=false;
out.print("
}
out.println();
}
}
public static void build(Builder writer, Type type) {
if(type instanceof Leaf) {
writer.initialise(1);
writer.buildPrimitive(0,(Type.Leaf)type);
} else {
Compound graph = (Compound) type;
writer.initialise(graph.nodes.length);
Node[] nodes = graph.nodes;
for(int i=0;i!=nodes.length;++i) {
Node node = nodes[i];
switch (node.kind) {
case K_VOID:
writer.buildPrimitive(i,T_VOID);
break;
case K_ANY:
writer.buildPrimitive(i,T_ANY);
break;
case K_NULL:
writer.buildPrimitive(i,T_NULL);
break;
case K_BOOL:
writer.buildPrimitive(i,T_BOOL);
break;
case K_INT:
writer.buildPrimitive(i,T_INT);
break;
case K_RATIONAL:
writer.buildPrimitive(i,T_REAL);
break;
case K_SET:
writer.buildSet(i,(Integer) node.data);
break;
case K_LIST:
writer.buildList(i,(Integer) node.data);
break;
case K_PROCESS:
writer.buildProcess(i,(Integer) node.data);
break;
case K_EXISTENTIAL:
writer.buildExistential(i,(NameID) node.data);
break;
case K_DICTIONARY: {
// binary node
Pair<Integer, Integer> p = (Pair<Integer, Integer>) node.data;
writer.buildDictionary(i,p.first(),p.second());
break;
}
case K_UNION: {
int[] bounds = (int[]) node.data;
writer.buildUnion(i,bounds);
break;
}
case K_TUPLE: {
int[] bounds = (int[]) node.data;
writer.buildTuple(i,bounds);
break;
}
case K_FUNCTION: {
int[] bounds = (int[]) node.data;
int[] params = new int[bounds.length-2];
System.arraycopy(bounds, 2, params,0, params.length);
writer.buildFunction(i,bounds[0],bounds[1],params);
break;
}
case K_RECORD: {
// labeled nary node
Pair<String, Integer>[] fields = (Pair<String, Integer>[]) node.data;
writer.buildRecord(i,fields);
break;
}
default:
throw new IllegalArgumentException("Invalid type encountered");
}
}
}
}
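	// Illustrative usage sketch of the Builder machinery above (exact primitive
	// names in the output depend on the Leaf toString() implementations):
	//   Type.build(new Type.PrintBuilder(System.out), Type.T_LIST(Type.T_INT));
	//   // prints something like:
	//   //   #0 = [#1]
	//   //   #1 = int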
// Type operations
/**
* A subtype relation encodes both a subtype and a supertype relation
* between two separate domains (called <code>from</code> and
* <code>to</code>).
*/
public final static class SubtypeRelation {
/**
* Indicates the size of the "source" domain.
*/
private final int fromDomain;
/**
* Indicates the size of the "target" domain.
*/
private final int toDomain;
/**
 * Stores subtype relation as a binary matrix of dimension
* <code>fromDomain</code> x <code>toDomain</code>. This matrix
* <code>r</code> is organised into row-major order, where
* <code>r[i][j]</code> implies <code>i :> j</code>.
*/
private final BitSet subTypes;
/**
 * Stores subtype relation as a binary matrix of dimension
* <code>fromDomain</code> x <code>toDomain</code>. This matrix
* <code>r</code> is organised into row-major order, where
* <code>r[i][j]</code> implies <code>i <: j</code>.
*/
private final BitSet superTypes;
public SubtypeRelation(int fromDomain, int toDomain) {
this.fromDomain = fromDomain;
this.toDomain = toDomain;
this.subTypes = new BitSet(fromDomain*toDomain);
this.superTypes = new BitSet(fromDomain*toDomain);
// Initially, set all sub- and super-types as true
subTypes.set(0,subTypes.size(),true);
superTypes.set(0,superTypes.size(),true);
}
/**
 * Check whether a given node is a subtype of another.
*
* @param from
* @param to
* @return
*/
public boolean isSubtype(int from, int to) {
return subTypes.get((toDomain*from) + to);
}
/**
 * Check whether a given node is a supertype of another.
*
* @param from
* @param to
* @return
*/
public boolean isSupertype(int from, int to) {
return superTypes.get((toDomain*from) + to);
}
/**
* Set the subtype flag for a given pair in the relation.
*
* @param from
* @param to
* @param flag
*/
public void setSubtype(int from, int to, boolean flag) {
subTypes.set((toDomain*from) + to,flag);
}
/**
* Set the supertype flag for a given pair in the relation.
*
* @param from
* @param to
* @param flag
*/
public void setSupertype(int from, int to, boolean flag) {
superTypes.set((toDomain*from) + to,flag);
}
public String toString() {
return toString(subTypes) + "\n" + toString(superTypes);
}
public String toString(BitSet matrix) {
String r = " |";
for(int i=0;i!=toDomain;++i) {
r = r + " " + (i%10);
}
r = r + "\n-+";
for(int i=0;i!=toDomain;++i) {
r = r + "
}
r = r + "\n";
for(int i=0;i!=fromDomain;++i) {
r = r + (i%10) + "|";;
for(int j=0;j!=toDomain;++j) {
if(matrix.get((i*toDomain)+j)) {
r += " 1";
} else {
r += " 0";
}
}
r = r + "\n";
}
return r;
}
}
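	// Illustrative sketch of the row-major indexing used by SubtypeRelation above:
	//   SubtypeRelation rel = new SubtypeRelation(2, 3); // fromDomain=2, toDomain=3
	//   rel.setSubtype(1, 2, false);  // clears the bit at index 1*3 + 2 == 5
	//   rel.isSubtype(1, 2);          // now false; all pairs start out as true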
/**
* A subtype inference is responsible for computing a complete subtype
* relation between two given graphs. The class is abstract because there
* are different possible implementations of this. In particular, the case
* when coercions are being considered, versus the case when they are not.
*
* @author djp
*
*/
public static abstract class SubtypeInference {
protected final Node[] fromGraph;
protected final Node[] toGraph;
protected final SubtypeRelation assumptions;
public SubtypeInference(Node[] fromGraph, Node[] toGraph) {
this.fromGraph = fromGraph;
this.toGraph = toGraph;
this.assumptions = new SubtypeRelation(fromGraph.length,toGraph.length);
}
public SubtypeRelation doInference() {
int fromDomain = fromGraph.length;
int toDomain = toGraph.length;
boolean changed = true;
while(changed) {
changed=false;
for(int i=0;i!=fromDomain;i++) {
for(int j=0;j!=toDomain;j++) {
boolean isubj = isSubType(i,j);
boolean isupj = isSuperType(i,j);
if(assumptions.isSubtype(i,j) && !isubj) {
assumptions.setSubtype(i,j,false);
changed = true;
}
if(assumptions.isSupertype(i,j) && !isupj) {
assumptions.setSupertype(i,j,false);
changed = true;
}
}
}
}
return assumptions;
}
public abstract boolean isSubType(int from, int to);
public abstract boolean isSuperType(int from, int to);
}
public static class DefaultSubtypeOperator extends SubtypeInference {
public DefaultSubtypeOperator(Node[] fromGraph, Node[] toGraph) {
super(fromGraph,toGraph);
}
public boolean isSubType(int from, int to) {
Node fromNode = fromGraph[from];
Node toNode = toGraph[to];
if(fromNode.kind == toNode.kind) {
switch(fromNode.kind) {
case K_EXISTENTIAL:
NameID nid1 = (NameID) fromNode.data;
NameID nid2 = (NameID) toNode.data;
return nid1.equals(nid2);
case K_SET:
case K_LIST:
case K_PROCESS: {
return assumptions.isSubtype((Integer) fromNode.data,(Integer) toNode.data);
}
case K_DICTIONARY: {
// binary node
Pair<Integer, Integer> p1 = (Pair<Integer, Integer>) fromNode.data;
Pair<Integer, Integer> p2 = (Pair<Integer, Integer>) toNode.data;
return assumptions.isSubtype(p1.first(),p2.first()) && assumptions.isSubtype(p1.second(),p2.second());
}
case K_TUPLE: {
// nary nodes
int[] elems1 = (int[]) fromNode.data;
int[] elems2 = (int[]) toNode.data;
if(elems1.length != elems2.length){ return false; }
for(int i=0;i<elems1.length;++i) {
if(!assumptions.isSubtype(elems1[i],elems2[i])) { return false; }
}
return true;
}
case K_FUNCTION: {
// nary nodes
int[] elems1 = (int[]) fromNode.data;
int[] elems2 = (int[]) toNode.data;
if(elems1.length != elems2.length){
return false;
}
// Check (optional) receiver value first (which is contravariant)
int e1 = elems1[0];
int e2 = elems2[0];
if((e1 == -1 || e2 == -1) && e1 != e2) {
return false;
} else if (e1 != -1 && e2 != -1
&& !assumptions.isSupertype(e1,e2)) {
return false;
}
// Check return value first (which is covariant)
e1 = elems1[1];
e2 = elems2[1];
if(!assumptions.isSubtype(e1,e2)) {
return false;
}
// Now, check parameters (which are contra-variant)
for(int i=2;i<elems1.length;++i) {
e1 = elems1[i];
e2 = elems2[i];
if(!assumptions.isSupertype(e1,e2)) {
return false;
}
}
return true;
}
case K_RECORD:
{
Pair<String, Integer>[] fields1 = (Pair<String, Integer>[]) fromNode.data;
Pair<String, Integer>[] fields2 = (Pair<String, Integer>[]) toNode.data;
if(fields1.length != fields2.length) {
return false;
}
for (int i = 0; i != fields1.length; ++i) {
Pair<String, Integer> e1 = fields1[i];
Pair<String, Integer> e2 = fields2[i];
if (!e1.first().equals(e2.first())
|| !assumptions.isSubtype(e1.second(),e2.second())) {
return false;
}
}
return true;
}
case K_UNION: {
int[] bounds2 = (int[]) toNode.data;
for(int j : bounds2) {
if(!assumptions.isSubtype(from,j)) { return false; }
}
return true;
}
case K_LABEL:
throw new IllegalArgumentException("attempting to minimise open recurisve type");
default:
// primitive types true immediately
return true;
}
} else if(fromNode.kind == K_ANY || toNode.kind == K_VOID) {
return true;
} else if(fromNode.kind == K_UNION) {
int[] bounds1 = (int[]) fromNode.data;
// the union is a supertype of c2 if some bound in c1 is.
for(int i : bounds1) {
if(assumptions.isSubtype(i,to)) {
return true;
}
}
return false;
} else if(toNode.kind == K_UNION) {
int[] bounds2 = (int[]) toNode.data;
// c1 must be a supertype of every bound in c2.
for(int j : bounds2) {
if(!assumptions.isSubtype(from,j)) {
return false;
}
}
return true;
}
return false;
}
public boolean isSuperType(int from, int to) {
Node fromNode = fromGraph[from];
Node toNode = toGraph[to];
if(fromNode.kind == toNode.kind) {
switch(fromNode.kind) {
case K_EXISTENTIAL:
NameID nid1 = (NameID) fromNode.data;
NameID nid2 = (NameID) toNode.data;
return nid1.equals(nid2);
case K_SET:
case K_LIST:
case K_PROCESS: {
return assumptions.isSupertype((Integer) fromNode.data,(Integer) toNode.data);
}
case K_DICTIONARY: {
// binary node
Pair<Integer, Integer> p1 = (Pair<Integer, Integer>) fromNode.data;
Pair<Integer, Integer> p2 = (Pair<Integer, Integer>) toNode.data;
return assumptions.isSupertype(p1.first(),p2.first()) && assumptions.isSupertype(p1.second(),p2.second());
}
case K_TUPLE: {
// nary nodes
int[] elems1 = (int[]) fromNode.data;
int[] elems2 = (int[]) toNode.data;
if(elems1.length != elems2.length){ return false; }
for(int i=0;i<elems1.length;++i) {
if(!assumptions.isSupertype(elems1[i],elems2[i])) { return false; }
}
return true;
}
case K_FUNCTION: {
// nary nodes
int[] elems1 = (int[]) fromNode.data;
int[] elems2 = (int[]) toNode.data;
if(elems1.length != elems2.length){
return false;
}
// Check (optional) receiver value first (which is contravariant)
int e1 = elems1[0];
int e2 = elems2[0];
if((e1 == -1 || e2 == -1) && e1 != e2) {
return false;
} else if (e1 != -1 && e2 != -1
&& !assumptions.isSubtype(e1,e2)) {
return false;
}
// Check return value first (which is covariant)
e1 = elems1[1];
e2 = elems2[1];
if(!assumptions.isSupertype(e1,e2)) {
return false;
}
// Now, check parameters (which are contra-variant)
for(int i=2;i<elems1.length;++i) {
e1 = elems1[i];
e2 = elems2[i];
if(!assumptions.isSubtype(e1,e2)) {
return false;
}
}
return true;
}
case K_RECORD:
{
Pair<String, Integer>[] fields1 = (Pair<String, Integer>[]) fromNode.data;
Pair<String, Integer>[] fields2 = (Pair<String, Integer>[]) toNode.data;
if(fields1.length != fields2.length) {
return false;
}
for (int i = 0; i != fields1.length; ++i) {
Pair<String, Integer> e1 = fields1[i];
Pair<String, Integer> e2 = fields2[i];
if (!e1.first().equals(e2.first())
|| !assumptions.isSupertype(e1.second(),e2.second())) {
return false;
}
}
return true;
}
case K_UNION: {
int[] bounds1 = (int[]) toNode.data;
// check every bound in c1 is a subtype of some bound in toNode.
for(int i : bounds1) {
if(!assumptions.isSupertype(i,to)) {
return false;
}
}
return true;
}
case K_LABEL:
throw new IllegalArgumentException("attempting to minimise open recurisve type");
default:
// primitive types true immediately
return true;
}
} else if(fromNode.kind == K_VOID || toNode.kind == K_ANY) {
return true;
} else if(fromNode.kind == K_UNION) {
int[] bounds1 = (int[]) fromNode.data;
// every bound in c1 must be a subtype of c2.
for(int i : bounds1) {
if(!assumptions.isSupertype(i,to)) {
return false;
}
}
return true;
} else if(toNode.kind == K_UNION) {
int[] bounds2 = (int[]) toNode.data;
// c1 is a subtype of c2 if it is a subtype of some bound in c2.
for(int j : bounds2) {
if(assumptions.isSupertype(from,j)) {
return true;
}
}
}
return false;
}
}
public static final class CoerciveSubtypeOperator extends DefaultSubtypeOperator {
public CoerciveSubtypeOperator(Node[] fromGraph, Node[] toGraph) {
super(fromGraph,toGraph);
}
public boolean isSubType(int from, int to) {
Node fromNode = fromGraph[from];
Node toNode = toGraph[to];
if(fromNode.kind == K_RATIONAL && toNode.kind == K_INT) {
return true;
} else {
return super.isSubType(from,to);
}
}
public boolean isSuperType(int from, int to) {
Node fromNode = fromGraph[from];
Node toNode = toGraph[to];
if(fromNode.kind == K_INT && toNode.kind == K_RATIONAL) {
return true;
} else {
return super.isSuperType(from,to);
}
}
/*
* The following implements width subtyping and is disabled.
if(sign) {
// labeled nary nodes
Pair<String, Integer>[] _fields1 = (Pair<String, Integer>[]) c1.data;
Pair<String, Integer>[] fields2 = (Pair<String, Integer>[]) toNode.data;
HashMap<String,Integer> fields1 = new HashMap<String,Integer>();
for(Pair<String,Integer> f : _fields1) {
fields1.put(f.first(), f.second());
}
for (int i = 0; i != fields2.length; ++i) {
Pair<String, Integer> e2 = fields2[i];
Integer e1 = fields1.get(e2.first());
if (e1 == null
|| !subtypeMatrix.get((e1 * g2Size) + e2)) {
return false;
}
}
} else {
// labeled nary nodes
Pair<String, Integer>[] fields1 = (Pair<String, Integer>[]) c1.data;
Pair<String, Integer>[] _fields2 = (Pair<String, Integer>[]) toNode.data;
HashMap<String,Integer> fields2 = new HashMap<String,Integer>();
for(Pair<String,Integer> f : _fields2) {
fields2.put(f.first(), f.second());
}
for (int i = 0; i != fields1.length; ++i) {
Pair<String, Integer> e1 = fields1[i];
Integer e2 = fields2.get(e1.first());
if (e2 == null
|| !subtypeMatrix.get((e1.second() * g2Size) + e2)) {
return false;
}
}
*/
}
/**
* Determine whether type <code>t2</code> is a <i>subtype</i> of type
* <code>t1</code> (written t1 :> t2). In other words, whether the set of
* all possible values described by the type <code>t2</code> is a subset of
* that described by <code>t1</code>.
*/
public static boolean isSubtype(Type t1, Type t2) {
Node[] g1 = nodes(t1);
Node[] g2 = nodes(t2);
SubtypeInference inference = new DefaultSubtypeOperator(g1,g2);
SubtypeRelation rel = inference.doInference();
return rel.isSubtype(0, 0);
}
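	// Illustrative usage sketch (relies on fromString above; results follow the
	// DefaultSubtypeOperator as written):
	//   Type union = Type.fromString("int|real");
	//   Type.isSubtype(union, Type.T_INT);  // true:  int|real :> int
	//   Type.isSubtype(Type.T_INT, union);  // false: int does not cover real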
/**
* Determine whether type <code>t2</code> is a <i>coercive subtype</i> of
* type <code>t1</code> (written t1 :> t2). Note that it can happen where
* the following holds:
*
* <pre>
* !isSubtype(t1, t2) && isCoerciveSubtype(t1, t2)
* </pre>
*
* This case indicates that a <i>coercion</i> is needed to flow from
* <code>t2</code> to <code>t1</code>.
*/
public static boolean isCoerciveSubtype(Type t1, Type t2) {
Node[] g1 = nodes(t1);
Node[] g2 = nodes(t2);
SubtypeInference inference = new CoerciveSubtypeOperator(g1,g2);
SubtypeRelation rel = inference.doInference();
return rel.isSubtype(0, 0);
}
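	// Illustrative contrast with isSubtype (following CoerciveSubtypeOperator
	// above, which additionally accepts int where real is expected):
	//   Type.isSubtype(Type.T_REAL, Type.T_INT);         // false
	//   Type.isCoerciveSubtype(Type.T_REAL, Type.T_INT); // true, via an int->real coercion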
/**
* Check whether two types are <i>isomorphic</i>. This is true if they are
* identical, or encode the same structure.
*
* @param t1
* @param t2
* @return
*/
public static boolean isomorphic(Type t1, Type t2) {
return isSubtype(t1,t2) && isSubtype(t2,t1);
}
/**
* Compute the <i>least upper bound</i> of two types t1 and t2. The least upper
* bound is a type t3 where <code>t3 :> t1</code>, <code>t3 :> t2</code> and
* there does not exist a type t4, where <code>t3 :> t4</code>,
* <code>t4 :> t1</code>, <code>t4 :> t2</code>.
*
* @param t1
* @param t2
* @return
*/
public static Type leastUpperBound(Type t1, Type t2) {
return minimise(T_UNION(t1,t2)); // so easy
}
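	// Illustrative sketch: leastUpperBound(T_INT, T_BOOL) is the minimised form
	// of the union int|bool, while leastUpperBound(T_INT, T_INT) should minimise
	// back down to plain int.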
/**
* Compute the <i>greatest lower bound</i> of two types t1 and t2. The
* greatest lower bound is a type t3 where <code>t1 :> t3</code>,
* <code>t2 :> t3</code> and there does not exist a type t4, where
* <code>t1 :> t4</code>, <code>t2 :> t4</code> and <code>t4 :> t3</code>.
*
* @param t1
* @param t2
* @return
*/
public static Type greatestLowerBound(Type t1, Type t2) {
// BUG FIX: this algorithm still isn't implemented correctly.
if(isSubtype(t1,t2)) {
return t2;
} else if(isSubtype(t2,t1)) {
return t1;
} else {
Node[] graph1, graph2;
if(t1 instanceof Leaf) {
graph1 = new Node[] { new Node(leafKind((Type.Leaf) t1), null) };
} else {
graph1 = ((Compound)t1).nodes;
}
if(t2 instanceof Leaf) {
graph2 = new Node[] { new Node(leafKind((Type.Leaf) t2), null) };
} else {
graph2 = ((Compound)t2).nodes;
}
ArrayList<Node> newNodes = new ArrayList<Node>();
intersect(0,graph1,0,graph2,newNodes, new HashMap());
Type glb = construct(newNodes.toArray(new Node[newNodes.size()]));
return minimise(glb);
}
}
/**
* Let <code>S</code> be determined by subtracting the set of values
* described by type <code>t2</code> from that described by <code>t1</code>.
* Then, this method returns the <i>least</i> type <code>t3</code> which
* covers <code>S</code> (that is, every value in <code>S</code> is in the
* set of values described by <code>t3</code>). Unfortunately, in some
* cases, <code>t3</code> may contain other (spurious) values not found in
* <code>S</code>.
*
* @param t1
* @param t2
* @return
*/
public static Type leastDifference(Type t1, Type t2) {
// BUG FIX: this algorithm still isn't implemented correctly.
if(isSubtype(t2,t1)) {
return T_VOID;
} else {
Node[] graph1, graph2;
if(t1 instanceof Leaf) {
graph1 = new Node[] { new Node(leafKind((Type.Leaf) t1), null) };
} else {
graph1 = ((Compound)t1).nodes;
}
if(t2 instanceof Leaf) {
graph2 = new Node[] { new Node(leafKind((Type.Leaf) t2), null) };
} else {
graph2 = ((Compound)t2).nodes;
}
SubtypeRelation assumptions = new DefaultSubtypeOperator(graph1,graph2).doInference();
ArrayList<Node> newNodes = new ArrayList<Node>();
difference(0,graph1,0,graph2,newNodes, new HashMap(),assumptions);
Type ldiff = construct(newNodes.toArray(new Node[newNodes.size()]));
return minimise(ldiff);
}
}
/**
* The effective record type gives a subset of the visible fields which are
* guaranteed to be in the type. For example, consider this type:
*
* <pre>
* {int op, int x} | {int op, [int] y}
* </pre>
*
* Here, the field op is guaranteed to be present. Therefore, the effective
* record type is just <code>{int op}</code>.
*
* @param t
* @return
*/
public static Record effectiveRecordType(Type t) {
if (t instanceof Type.Record) {
return (Type.Record) t;
} else if (t instanceof Type.Union) {
Union ut = (Type.Union) t;
Record r = null;
for (Type b : ut.bounds()) {
if (!(b instanceof Record)) {
return null;
}
Record br = (Record) b;
if (r == null) {
r = br;
} else {
HashMap<String, Type> rfields = r.fields();
HashMap<String, Type> bfields = br.fields();
HashMap<String, Type> nfields = new HashMap();
for (Map.Entry<String, Type> e : rfields.entrySet()) {
Type bt = bfields.get(e.getKey());
if (bt != null) {
nfields.put(e.getKey(),
leastUpperBound(e.getValue(), bt));
}
}
r = T_RECORD(nfields);
}
}
return r;
}
return null;
}
public static Set effectiveSetType(Type t) {
if (t instanceof Type.Set) {
return (Type.Set) t;
} else if (t instanceof Type.Union) {
Union ut = (Type.Union) t;
Set r = null;
for (Type b : ut.bounds()) {
if (!(b instanceof Set)) {
return null;
}
Set br = (Set) b;
if (r == null) {
r = br;
} else {
r = T_SET(leastUpperBound(r.element(),br.element()));
}
}
return r;
}
return null;
}
public static List effectiveListType(Type t) {
if (t instanceof Type.List) {
return (Type.List) t;
} else if (t instanceof Type.Union) {
Union ut = (Type.Union) t;
List r = null;
for (Type b : ut.bounds()) {
if (!(b instanceof List)) {
return null;
}
List br = (List) b;
if (r == null) {
r = br;
} else {
r = T_LIST(leastUpperBound(r.element(),br.element()));
}
}
return r;
}
return null;
}
public static Dictionary effectiveDictionaryType(Type t) {
if (t instanceof Type.Dictionary) {
return (Type.Dictionary) t;
} else if (t instanceof Type.Union) {
Union ut = (Type.Union) t;
Dictionary r = null;
for (Type b : ut.bounds()) {
if (!(b instanceof Dictionary)) {
return null;
}
Dictionary br = (Dictionary) b;
if (r == null) {
r = br;
} else {
r = T_DICTIONARY(leastUpperBound(r.key(), br.key()),
leastUpperBound(r.value(), br.value()));
}
}
return r;
}
return null;
}
/**
* Minimise the given type to produce a fully minimised version.
*
* @param type
* @return
*/
public static Type minimise(Type type) {
// leaf types never need minimising!
if (type instanceof Leaf) {
return type;
}
// compound types need minimising.
Node[] nodes = ((Compound) type).nodes;
SubtypeRelation relation = new DefaultSubtypeOperator(nodes,nodes).doInference();
ArrayList<Node> newnodes = new ArrayList<Node>();
int[] allocated = new int[nodes.length];
//System.out.println("REBUILDING: " + type);
//build(new PrintBuilder(System.out),type);
//System.out.println(relation.toString());
rebuild(0, nodes, allocated, newnodes, relation);
return construct(newnodes.toArray(new Node[newnodes.size()]));
}
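	// Illustrative sketch: minimising a union with a redundant bound, e.g.
	// minimise(T_UNION(T_INT, T_INT, T_BOOL)), should collapse the duplicate int
	// bound and yield a type equivalent to int|bool.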
/**
* This method reconstructs a graph given a set of equivalent nodes. The
* equivalence classes for a node are determined by the given subtype
 * matrix, whilst the allocated array identifies when a node has already been
* allocated for a given equivalence class.
*
* @param idx
* @param graph
* @param allocated
* @param newNodes
* @param matrix
* @return
*/
private static int rebuild(int idx, Node[] graph, int[] allocated,
ArrayList<Node> newNodes, SubtypeRelation assumptions) {
int graph_size = graph.length;
Node node = graph[idx];
int cidx = allocated[idx];
if(cidx > 0) {
// node already constructed for this equivalence class
return cidx - 1;
}
cidx = newNodes.size(); // my new index
// now, allocate all nodes in equivalence class
for(int i=0;i!=graph_size;++i) {
if(assumptions.isSubtype(i,idx) && assumptions.isSubtype(idx, i)) {
allocated[i] = cidx + 1;
}
}
newNodes.add(null); // reserve space for my node
Object data = null;
switch(node.kind) {
case K_EXISTENTIAL:
data = node.data;
break;
case K_SET:
case K_LIST:
case K_PROCESS: {
int element = (Integer) node.data;
data = (Integer) rebuild(element,graph,allocated,newNodes,assumptions);
break;
}
case K_DICTIONARY: {
Pair<Integer,Integer> p = (Pair) node.data;
int from = (Integer) rebuild(p.first(),graph,allocated,newNodes,assumptions);
int to = (Integer) rebuild(p.second(),graph,allocated,newNodes,assumptions);
data = new Pair(from,to);
break;
}
case K_TUPLE:
case K_FUNCTION: {
int[] elems = (int[]) node.data;
int[] nelems = new int[elems.length];
for(int i = 0; i!=elems.length;++i) {
if(elems[i] == -1) {
// possible for K_FUNCTION
nelems[i] = -1;
} else {
nelems[i] = (Integer) rebuild(elems[i],graph,allocated,newNodes,assumptions);
}
}
data = nelems;
break;
}
case K_RECORD: {
Pair<String, Integer>[] elems = (Pair[]) node.data;
Pair<String, Integer>[] nelems = new Pair[elems.length];
for (int i = 0; i != elems.length; ++i) {
Pair<String, Integer> p = elems[i];
int j = (Integer) rebuild(p.second(), graph, allocated,
newNodes, assumptions);
nelems[i] = new Pair<String, Integer>(p.first(), j);
}
data = nelems;
break;
}
case K_UNION: {
int[] elems = (int[]) node.data;
// The aim here is to try and remove equivalent nodes, and nodes
// which are subsumed by other nodes.
HashSet<Integer> nelems = new HashSet<Integer>();
for(int i : elems) { nelems.add(i); }
for(int i=0;i!=elems.length;i++) {
int n1 = elems[i];
for(int j=0;j<elems.length;j++) {
if(i==j) { continue; }
int n2 = elems[j];
if(assumptions.isSubtype(n1,n2) && (!assumptions.isSubtype(n2,n1) || i < j)) {
nelems.remove(n2);
}
}
}
// ok, let's see what we've got left
if (nelems.size() == 1) {
// ok, union node should be removed as it's entirely subsumed. I
// need to undo what I've already done in allocating a new node.
newNodes.remove(cidx);
for (int i = 0; i != graph_size; ++i) {
if(assumptions.isSubtype(i,idx) && assumptions.isSubtype(idx,i)) {
allocated[i] = 0;
}
}
return rebuild(nelems.iterator().next(), graph, allocated, newNodes,
assumptions);
} else {
// first off, we have to normalise this sucker
ArrayList<Integer> nnelems = new ArrayList(nelems);
Collections.sort(nnelems,new MinimiseComparator(graph,assumptions));
// ok, now rebuild
int[] melems = new int[nelems.size()];
int i=0;
for (Integer j : nnelems) {
melems[i++] = (Integer) rebuild(j, graph,
allocated, newNodes, assumptions);
}
data = melems;
}
break;
}
}
// finally, create the new node!!!
newNodes.set(cidx, new Node(node.kind,data));
return cidx;
}
private static final class MinimiseComparator implements Comparator<Integer> {
private Node[] graph;
private SubtypeRelation subtypeMatrix;
public MinimiseComparator(Node[] graph, SubtypeRelation matrix) {
this.graph = graph;
this.subtypeMatrix = matrix;
}
public int compare(Integer a, Integer b) {
Node n1 = graph[a];
Node n2 = graph[b];
if(n1.kind < n2.kind) {
return -1;
} else if(n1.kind > n2.kind) {
return 1;
} else {
// First try subtype relation
if (subtypeMatrix.isSubtype(b,a)) {
return -1;
} else if (subtypeMatrix.isSubtype(a,b)) {
return 1;
}
// Second try harder stuff
Object data1 = n1.data;
Object data2 = n2.data;
switch(n1.kind){
case K_VOID:
case K_ANY:
case K_META:
case K_NULL:
case K_BOOL:
case K_INT:
case K_RATIONAL:
return 0;
case K_EXISTENTIAL: {
String s1 = (String) data1;
String s2 = (String) data2;
return s1.compareTo(s2);
}
case K_RECORD: {
Pair[] fields1 = (Pair[]) data1;
Pair[] fields2 = (Pair[]) data2;
if(fields1.length < fields2.length) {
return -1;
} else if(fields1.length > fields2.length) {
return 1;
}
// FIXME: could presumably do more here.
}
// FIXME: could do more here!!
}
if(a < b) {
return -1;
} else if(a > b) {
return 1;
} else {
return 0;
}
}
}
}
private static int intersect(int n1, Node[] graph1, int n2, Node[] graph2,
ArrayList<Node> newNodes,
HashMap<Pair<Integer, Integer>, Integer> allocations) {
Integer idx = allocations.get(new Pair(n1,n2));
if(idx != null) {
// this indicates an allocation has already been performed for this
// pair.
return idx;
}
Node c1 = graph1[n1];
Node c2 = graph2[n2];
int nid = newNodes.size(); // my node id
newNodes.add(null); // reserve space for my node
allocations.put(new Pair(n1,n2), nid);
Node node; // new node being created
if(c1.kind == c2.kind) {
switch(c1.kind) {
case K_VOID:
case K_ANY:
case K_META:
case K_NULL:
case K_BOOL:
case K_INT:
case K_RATIONAL:
node = c1;
break;
case K_EXISTENTIAL:
NameID nid1 = (NameID) c1.data;
NameID nid2 = (NameID) c2.data;
if(nid1.name().equals(nid2.name())) {
node = c1;
} else {
node = new Node(K_VOID,null);
}
break;
case K_SET:
case K_LIST:
case K_PROCESS: {
// unary node
int e1 = (Integer) c1.data;
int e2 = (Integer) c2.data;
int element = intersect(e1,graph1,e2,graph2,newNodes,allocations);
node = new Node(c1.kind,element);
break;
}
case K_DICTIONARY: {
// binary node
Pair<Integer, Integer> p1 = (Pair<Integer, Integer>) c1.data;
Pair<Integer, Integer> p2 = (Pair<Integer, Integer>) c2.data;
// p1's indices refer to graph1, p2's to graph2
int key = intersect(p1.first(),graph1,p2.first(),graph2,newNodes,allocations);
int value = intersect(p1.second(),graph1,p2.second(),graph2,newNodes,allocations);
node = new Node(K_DICTIONARY,new Pair(key,value));
break;
}
case K_TUPLE: {
// nary nodes
int[] elems1 = (int[]) c1.data;
int[] elems2 = (int[]) c2.data;
if(elems1.length != elems2.length) {
// TODO: can we do better here?
node = new Node(K_VOID,null);
} else {
int[] nelems = new int[elems1.length];
for(int i=0;i!=nelems.length;++i) {
nelems[i] = intersect(elems1[i],graph1,elems2[i],graph2,newNodes,allocations);
}
node = new Node(K_TUPLE,nelems);
}
break;
}
case K_FUNCTION: {
// nary nodes
int[] elems1 = (int[]) c1.data;
int[] elems2 = (int[]) c2.data;
int e1 = elems1[0];
int e2 = elems2[0];
if(elems1.length != elems2.length){
node = new Node(K_VOID,null);
} else if ((e1 == -1 || e2 == -1) && e1 != e2) {
node = new Node(K_VOID, null);
} else {
int[] nelems = new int[elems1.length];
// TODO: need to check here whether or not this is the right
						// thing to do. My gut is telling me that covariance and
						// contravariance should be treated differently ...
for (int i = 0; i != nelems.length; ++i) {
nelems[i] = intersect(elems1[i], graph1, elems2[i],
graph2, newNodes,allocations);
}
node = new Node(K_FUNCTION, nelems);
}
break;
}
case K_RECORD:
// labeled nary nodes
outer: {
Pair<String, Integer>[] fields1 = (Pair<String, Integer>[]) c1.data;
Pair<String, Integer>[] fields2 = (Pair<String, Integer>[]) c2.data;
int old = newNodes.size();
if(fields1.length != fields2.length) {
node = new Node(K_VOID,null);
} else {
Pair<String, Integer>[] nfields = new Pair[fields1.length];
for (int i = 0; i != nfields.length; ++i) {
Pair<String,Integer> e1 = fields1[i];
Pair<String,Integer> e2 = fields2[i];
if (!e1.first().equals(e2.first())) {
node = new Node(K_VOID, null);
break outer;
} else {
int nidx = intersect(e1.second(), graph1, e2.second(), graph2, newNodes,
allocations);
if (newNodes.get(nidx).kind == K_VOID) {
							// A record with a field of void type cannot exist,
							// so the whole record collapses to void.
while (newNodes.size() != old) {
newNodes.remove(newNodes.size() - 1);
}
node = new Node(K_VOID, null);
break outer;
}
nfields[i] = new Pair<String, Integer>(e1.first(), nidx);
}
}
node = new Node(K_RECORD, nfields);
}
}
break;
case K_UNION: {
				// Both nodes are unions. Intersect c2 with each bound of c1; the
				// union of the results gives the overall intersection.
				int[] bounds1 = (int[]) c1.data;
				int[] nbounds = new int[bounds1.length];
				// intersect every bound in c1 with c2.
for (int i = 0; i != bounds1.length; ++i) {
nbounds[i] = intersect(bounds1[i], graph1, n2, graph2,
newNodes,allocations);
}
node = new Node(K_UNION,nbounds);
break;
}
default:
throw new IllegalArgumentException("attempting to minimise open recurisve type");
}
} else if(c1.kind == K_INT && c2.kind == K_RATIONAL) {
node = new Node(K_INT,null);
} else if(c1.kind == K_RATIONAL && c2.kind == K_INT) {
node = new Node(K_INT,null);
} else if(c1.kind == K_ANY) {
newNodes.remove(newNodes.size()-1);
extractOnto(n2,graph2,newNodes);
return nid;
} else if(c2.kind == K_ANY) {
newNodes.remove(newNodes.size()-1);
extractOnto(n1,graph1,newNodes);
return nid;
} else if (c1.kind == K_UNION){
int[] obounds = (int[]) c1.data;
int[] nbounds = new int[obounds.length];
			// intersect every bound in c1 with c2.
for (int i = 0; i != obounds.length; ++i) {
nbounds[i] = intersect(obounds[i], graph1, n2, graph2,
newNodes,allocations);
}
node = new Node(K_UNION,nbounds);
} else if (c2.kind == K_UNION) {
int[] obounds = (int[]) c2.data;
int[] nbounds = new int[obounds.length];
			// intersect c1 with every bound in c2.
for (int i = 0; i != obounds.length; ++i) {
nbounds[i] = intersect(n1,graph1,obounds[i], graph2,
newNodes,allocations);
}
node = new Node(K_UNION,nbounds);
} else {
// default case --> go to void
node = new Node(K_VOID,null);
}
// finally, create the new node!!!
newNodes.set(nid, node);
return nid;
}
private static int difference(int n1, Node[] graph1, int n2, Node[] graph2,
ArrayList<Node> newNodes,
HashMap<Pair<Integer, Integer>, Integer> allocations, SubtypeRelation matrix) {
int nid = newNodes.size(); // my node id
if(matrix.isSupertype(n1,n2)) {
newNodes.add(new Node(K_VOID,null));
return nid;
}
Integer idx = allocations.get(new Pair(n1,n2));
if(idx != null) {
// this indicates an allocation has already been performed for this
// pair.
return idx;
}
Node c1 = graph1[n1];
Node c2 = graph2[n2];
allocations.put(new Pair(n1,n2), nid);
newNodes.add(null); // reserve space for my node
Node node; // new node being created
if(c1.kind == c2.kind) {
switch(c1.kind) {
case K_VOID:
case K_ANY:
case K_META:
case K_NULL:
case K_BOOL:
case K_INT:
case K_RATIONAL:
node = new Node(K_VOID,null);
break;
case K_EXISTENTIAL:
NameID nid1 = (NameID) c1.data;
NameID nid2 = (NameID) c2.data;
if(nid1.name().equals(nid2.name())) {
node = new Node(K_VOID,null);
} else {
node = c1;
}
break;
case K_SET:
case K_LIST:
case K_PROCESS: {
// unary node
int e1 = (Integer) c1.data;
int e2 = (Integer) c2.data;
int element = difference(e1,graph1,e2,graph2,newNodes,allocations,matrix);
node = new Node(c1.kind,element);
break;
}
case K_DICTIONARY: {
// binary node
Pair<Integer, Integer> p1 = (Pair<Integer, Integer>) c1.data;
Pair<Integer, Integer> p2 = (Pair<Integer, Integer>) c2.data;
				// note: p1's indices refer to graph1, p2's to graph2
				int key = difference(p1.first(),graph1,p2.first(),graph2,newNodes,allocations,matrix);
				int value = difference(p1.second(),graph1,p2.second(),graph2,newNodes,allocations,matrix);
node = new Node(K_DICTIONARY,new Pair(key,value));
break;
}
case K_TUPLE: {
// nary nodes
int[] elems1 = (int[]) c1.data;
int[] elems2 = (int[]) c2.data;
if(elems1.length != elems2.length) {
node = c1;
} else {
int[] nelems = new int[elems1.length];
for(int i=0;i!=nelems.length;++i) {
nelems[i] = difference(elems1[i],graph1,elems2[i],graph2,newNodes,allocations,matrix);
}
node = new Node(K_TUPLE,nelems);
}
break;
}
case K_FUNCTION: {
// nary nodes
int[] elems1 = (int[]) c1.data;
int[] elems2 = (int[]) c2.data;
int e1 = elems1[0];
int e2 = elems2[0];
if(elems1.length != elems2.length){
node = c1;
} else if ((e1 == -1 || e2 == -1) && e1 != e2) {
node = c1;
} else {
int[] nelems = new int[elems1.length];
// TODO: need to check here whether or not this is the right
					// thing to do. My gut is telling me that covariance and
					// contravariance should be treated differently ...
for (int i = 0; i != nelems.length; ++i) {
nelems[i] = difference(elems1[i], graph1, elems2[i],
graph2, newNodes,allocations,matrix);
}
node = new Node(K_FUNCTION, nelems);
}
break;
}
case K_RECORD:
// labeled nary nodes
Pair<String, Integer>[] fields1 = (Pair<String, Integer>[]) c1.data;
Pair<String, Integer>[] fields2 = (Pair<String, Integer>[]) c2.data;
if(fields1.length != fields2.length) {
node = c1;
} else {
outer: {
Pair<String, Integer>[] nfields = new Pair[fields1.length];
// FIXME: need to support WIDTH subtyping here.
for (int i = 0; i != fields1.length; ++i) {
Pair<String, Integer> e1 = fields1[i];
Pair<String, Integer> e2 = fields2[i];
if (!e1.first().equals(e2.first())) {
node = c1;
break outer;
} else {
nfields[i] = new Pair<String, Integer>(
e1.first(), difference(e1.second(),
graph1, e2.second(), graph2,
newNodes,allocations,matrix));
}
}
node = new Node(K_RECORD, nfields);
}
}
break;
case K_UNION: {
			// Both nodes are unions. Subtract c2 from each bound of c1; the
			// union of the results gives the overall difference.
			int[] bounds1 = (int[]) c1.data;
			int[] nbounds = new int[bounds1.length];
			// subtract c2 from every bound in c1.
for (int i = 0; i != bounds1.length; ++i) {
nbounds[i] = difference(bounds1[i], graph1, n2, graph2,
newNodes,allocations,matrix);
}
node = new Node(K_UNION,nbounds);
break;
}
default:
throw new IllegalArgumentException("attempting to minimise open recurisve type");
}
} else if(c1.kind == K_INT && c2.kind == K_RATIONAL) {
// this is obviously imprecise
node = new Node(K_VOID,null);
} else if(c1.kind == K_RATIONAL && c2.kind == K_INT) {
// this is obviously imprecise
node = new Node(K_RATIONAL,null);
} else if(c1.kind == K_ANY) {
// TODO: try to do better
node = new Node(K_ANY,null);
} else if(c2.kind == K_ANY) {
node = new Node(K_VOID,null);
} else if (c1.kind == K_UNION){
int[] obounds = (int[]) c1.data;
int[] nbounds = new int[obounds.length];
for (int i = 0; i != obounds.length; ++i) {
nbounds[i] = difference(obounds[i], graph1, n2, graph2,
newNodes,allocations,matrix);
}
node = new Node(K_UNION,nbounds);
} else if (c2.kind == K_UNION) {
int[] obounds = (int[]) c2.data;
int[] nbounds = new int[obounds.length];
for (int i = 0; i != obounds.length; ++i) {
nbounds[i] = difference(n1,graph1,obounds[i], graph2,
newNodes,allocations,matrix);
}
// FIXME: this is broken. need intersection types.
node = new Node(K_UNION,nbounds);
} else {
// default case --> go to no change
node = c1;
}
if(node == c1) {
while(newNodes.size() > nid) {
newNodes.remove(newNodes.size()-1);
}
extractOnto(n1,graph1,newNodes);
return nid;
} else {
// finally, create the new node!!!
newNodes.set(nid, node);
return nid;
}
}
// Primitive Types
/**
* A leaf type represents a type which has no component types. For example,
* primitive types like <code>int</code> and <code>real</code> are leaf
* types.
*
* @author djp
*
*/
public static class Leaf extends Type {}
/**
* A void type represents the type whose variables cannot exist! That is,
* they cannot hold any possible value. Void is used to represent the return
* type of a function which does not return anything. However, it is also
	 * used to represent the element type of an empty list or set. <b>NOTE:</b>
* the void type is a subtype of everything; that is, it is bottom in the
* type lattice.
*
* @author djp
*
*/
public static final class Void extends Leaf {
private Void() {}
public boolean equals(Object o) {
return this == o;
}
public int hashCode() {
return 1;
}
public String toString() {
return "void";
}
}
/**
* The type any represents the type whose variables may hold any possible
* value. <b>NOTE:</b> the any type is top in the type lattice.
*
* @author djp
*
*/
public static final class Any extends Leaf {
private Any() {}
public boolean equals(Object o) {
return o == T_ANY;
}
public int hashCode() {
return 1;
}
public String toString() {
return "any";
}
}
/**
* The null type is a special type which should be used to show the absence
* of something. It is distinct from void, since variables can hold the
	 * special <code>null</code> value (whereas there is no special "void"
* value). With all of the problems surrounding <code>null</code> and
* <code>NullPointerException</code>s in languages like Java and C, it may
* seem that this type should be avoided. However, it remains a very useful
* abstraction to have around and, in Whiley, it is treated in a completely
* safe manner (unlike e.g. Java).
*
* @author djp
*
*/
public static final class Null extends Leaf {
private Null() {}
public boolean equals(Object o) {
return this == o;
}
public int hashCode() {
return 2;
}
public String toString() {
return "null";
}
}
/**
	 * The type meta represents the type of types. That is, values of this type
* are themselves types. (think reflection, where we have
* <code>class Class {}</code>).
*
* @author djp
*
*/
public static final class Meta extends Leaf {
private Meta() {}
public boolean equals(Object o) {
return o == T_META;
}
public int hashCode() {
return 1;
}
public String toString() {
return "type";
}
}
/**
	 * The existential type represents an unknown type, defined at a given
* position.
*
* @author djp
*
*/
public static final class Existential extends Compound{
private Existential(NameID name) {
super(new Node[] { new Node(K_EXISTENTIAL,name) });
}
public boolean equals(Object o) {
if(o instanceof Existential) {
Existential e = (Existential) o;
				return nodes[0].data.equals(e.nodes[0].data);
}
return false;
}
public NameID name() {
return (NameID) nodes[0].data;
}
public int hashCode() {
return nodes[0].data.hashCode();
}
public String toString() {
return "?" + name();
}
}
/**
* Represents the set of boolean values (i.e. true and false)
* @author djp
*
*/
public static final class Bool extends Leaf {
private Bool() {}
public boolean equals(Object o) {
return o == T_BOOL;
}
public int hashCode() {
return 3;
}
public String toString() {
return "bool";
}
}
/**
* Represents the set of (unbound) integer values. Since integer types in
* Whiley are unbounded, there is no equivalent to Java's
* <code>MIN_VALUE</code> and <code>MAX_VALUE</code> for <code>int</code>
* types.
*
* @author djp
*
*/
public static final class Int extends Leaf {
private Int() {}
public boolean equals(Object o) {
return o == T_INT;
}
public int hashCode() {
return 4;
}
public String toString() {
return "int";
}
}
/**
* Represents the set of (unbound) rational numbers.
*
* @author djp
*
*/
public static final class Real extends Leaf {
private Real() {}
public boolean equals(Object o) {
return o == T_REAL;
}
public int hashCode() {
return 5;
}
public String toString() {
return "real";
}
}
// Compound Type
/**
* A Compound data structure is essentially a graph encoding of a type. Each
* node in the graph corresponds to a component of the type. Recursive
	 * cycles in the graph are permitted. <b>NOTE:</b> care must be taken to
	 * ensure that the root of the graph (namely the node at index 0) matches the
* actual Compound class (i.e. if its kind is K_SET, then this is an
* instance of Set).
*/
private static class Compound extends Type {
protected final Node[] nodes;
public Compound(Node[] nodes) {
this.nodes = nodes;
}
/**
* Determine the hashCode of a type.
*/
public int hashCode() {
int r = 0;
for(Node c : nodes) {
r = r + c.hashCode();
}
return r;
}
/**
* This method compares two compound types to test whether they are
* <i>identical</i>. Observe that it does not perform an
* <i>isomorphism</i> test. Thus, two distinct types which are
* structurally isomorphic will <b>not</b> be considered equal under
* this method. <b>NOTE:</b> to test whether two types are structurally
		 * isomorphic, use the <code>isomorphic(t1,t2)</code> method.
*/
public boolean equals(Object o) {
if(o instanceof Compound) {
Node[] cs = ((Compound) o).nodes;
if(cs.length != nodes.length) {
return false;
}
for(int i=0;i!=cs.length;++i) {
if(!nodes[i].equals(cs[i])) {
return false;
}
}
return true;
}
return false;
}
protected final Type extract(int root) {
return construct(Type.extract(root,nodes));
}
public String toString() {
			// First, we need to find the header nodes of the graph. This is
			// necessary in order to mark the start of each recursive type.
BitSet headers = new BitSet(nodes.length);
BitSet visited = new BitSet(nodes.length);
BitSet onStack = new BitSet(nodes.length);
findHeaders(0,visited,onStack,headers,nodes);
visited.clear();
String[] titles = new String[nodes.length];
int count = 0;
for(int i=0;i!=nodes.length;++i) {
if(headers.get(i)) {
titles[i] = headerTitle(count++);
}
}
return Type.toString(0,visited,titles,nodes);
}
}
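	// Illustrative sketch (not part of the original API): how toString() renders a
	// recursive type. For the linked-list type built in linkedList() below, node 0
	// is a header (it is reached again via the union bound of "next"), so it is
	// given the title "X" and printed roughly as
	//
	//   X<{bool data,null|X next}>
	//
	// Non-recursive compounds have no headers and print without the "X<...>" wrapper.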
private static final Node[] extract(int root, Node[] nodes) {
// First, we perform the DFS.
BitSet visited = new BitSet(nodes.length);
// extracted maps new indices to old indices
ArrayList<Integer> extracted = new ArrayList<Integer>();
subgraph(root,visited,extracted,nodes);
// rextracted is the reverse of extracted
int[] rextracted = new int[nodes.length];
int i=0;
for(int j : extracted) {
rextracted[j]=i++;
}
Node[] newNodes = new Node[extracted.size()];
i=0;
for(int j : extracted) {
newNodes[i++] = remap(nodes[j],rextracted);
}
return newNodes;
}
private static final void extractOnto(int root, Node[] nodes,
ArrayList<Node> newNodes) {
// First, we perform the DFS.
BitSet visited = new BitSet(nodes.length);
// extracted maps new indices to old indices
ArrayList<Integer> extracted = new ArrayList<Integer>();
subgraph(root, visited, extracted, nodes);
// rextracted is the reverse of extracted
int[] rextracted = new int[nodes.length];
int i = newNodes.size();
for (int j : extracted) {
rextracted[j] = i++;
}
for (int j : extracted) {
newNodes.add(remap(nodes[j], rextracted));
}
}
private final static void subgraph(int index, BitSet visited,
ArrayList<Integer> extracted, Node[] graph) {
		if(visited.get(index)) { return; } // node already visited
extracted.add(index);
visited.set(index);
Node node = graph[index];
switch(node.kind) {
case K_SET:
case K_LIST:
case K_PROCESS:
// unary nodes
subgraph((Integer) node.data,visited,extracted,graph);
break;
case K_DICTIONARY:
// binary node
Pair<Integer,Integer> p = (Pair<Integer,Integer>) node.data;
subgraph(p.first(),visited,extracted,graph);
subgraph(p.second(),visited,extracted,graph);
break;
case K_TUPLE:
case K_UNION:
case K_FUNCTION:
// nary node
int[] bounds = (int[]) node.data;
for(int b : bounds) {
if(b == -1) { continue; } // possible with K_FUNCTION
subgraph(b,visited,extracted,graph);
}
break;
case K_RECORD:
// labeled nary node
Pair<String,Integer>[] fields = (Pair<String,Integer>[]) node.data;
for(Pair<String,Integer> f : fields) {
subgraph(f.second(),visited,extracted,graph);
}
break;
}
}
private final static void findHeaders(int index, BitSet visited,
BitSet onStack, BitSet headers, Node[] graph) {
if(visited.get(index)) {
// node already visited
if(onStack.get(index)) {
headers.set(index);
}
return;
}
onStack.set(index);
visited.set(index);
Node node = graph[index];
switch(node.kind) {
case K_SET:
case K_LIST:
case K_PROCESS:
// unary nodes
findHeaders((Integer) node.data,visited,onStack,headers,graph);
break;
case K_DICTIONARY:
// binary node
Pair<Integer,Integer> p = (Pair<Integer,Integer>) node.data;
findHeaders(p.first(),visited,onStack,headers,graph);
findHeaders(p.second(),visited,onStack,headers,graph);
break;
case K_TUPLE:
case K_UNION:
case K_FUNCTION:
// nary node
int[] bounds = (int[]) node.data;
for(int b : bounds) {
if(b == -1) { continue; } // possible with K_FUNCTION
findHeaders(b,visited,onStack,headers,graph);
}
break;
case K_RECORD:
// labeled nary node
Pair<String,Integer>[] fields = (Pair<String,Integer>[]) node.data;
for(Pair<String,Integer> f : fields) {
findHeaders(f.second(),visited,onStack,headers,graph);
}
break;
}
onStack.set(index,false);
}
private final static String toString(int index, BitSet visited,
String[] headers, Node[] graph) {
if (visited.get(index)) {
// node already visited
return headers[index];
} else if(headers[index] != null) {
visited.set(index);
}
Node node = graph[index];
String middle;
switch (node.kind) {
case K_VOID:
return "void";
case K_ANY:
return "any";
case K_NULL:
return "null";
case K_BOOL:
return "bool";
case K_INT:
return "int";
case K_RATIONAL:
return "rat";
case K_SET:
middle = "{" + toString((Integer) node.data, visited, headers, graph)
+ "}";
break;
case K_LIST:
middle = "[" + toString((Integer) node.data, visited, headers, graph)
+ "]";
break;
case K_EXISTENTIAL:
middle = "?" + node.data.toString();
break;
case K_PROCESS:
middle = "*" + toString((Integer) node.data, visited, headers, graph);
break;
case K_DICTIONARY: {
// binary node
Pair<Integer, Integer> p = (Pair<Integer, Integer>) node.data;
String k = toString(p.first(), visited, headers, graph);
String v = toString(p.second(), visited, headers, graph);
middle = "{" + k + "->" + v + "}";
break;
}
case K_UNION: {
int[] bounds = (int[]) node.data;
middle = "";
for (int i = 0; i != bounds.length; ++i) {
if (i != 0) {
middle += "|";
}
middle += toString(bounds[i], visited, headers, graph);
}
break;
}
case K_TUPLE: {
middle = "";
int[] bounds = (int[]) node.data;
for (int i = 0; i != bounds.length; ++i) {
if (i != 0) {
middle += ",";
}
middle += toString(bounds[i], visited, headers, graph);
}
middle = "(" + middle + ")";
break;
}
case K_FUNCTION: {
middle = "";
int[] bounds = (int[]) node.data;
String rec = bounds[0] == -1 ? null : toString(bounds[0],visited,headers,graph);
String ret = toString(bounds[1], visited, headers, graph);
for (int i = 2; i != bounds.length; ++i) {
if (i != 2) {
middle += ",";
}
middle += toString(bounds[i], visited, headers, graph);
}
if(rec != null) {
middle = rec + "::" + ret + "(" + middle + ")";
} else {
middle = ret + "(" + middle + ")";
}
break;
}
case K_RECORD: {
// labeled nary node
middle = "{";
Pair<String, Integer>[] fields = (Pair<String, Integer>[]) node.data;
for (int i = 0; i != fields.length; ++i) {
if (i != 0) {
middle += ",";
}
Pair<String, Integer> f = fields[i];
middle += toString(f.second(), visited, headers, graph) + " " + f.first();
}
middle = middle + "}";
break;
}
default:
throw new IllegalArgumentException("Invalid type encountered");
}
// Finally, check whether this is a header node, or not. If it is a
// header then we need to insert the recursive type.
String header = headers[index];
if(header != null) {
return header + "<" + middle + ">";
} else {
return middle;
}
}
private static final char[] headers = { 'X','Y','Z','U','V','W','L','M','N','O','P','Q','R','S','T'};
private static String headerTitle(int count) {
String r = Character.toString(headers[count%headers.length]);
int n = count / headers.length;
if(n > 0) {
return r + n;
} else {
return r;
}
}
// Compound Faces
/*
* The compound faces are not technically necessary, as they simply provide
* interfaces to the underlying nodes of a compound type. However, they
* certainly make it more pleasant to use this library.
*/
/**
* A tuple type describes a compound type made up of two or more
* subcomponents. It is similar to a record, except that fields are
* effectively anonymous.
*
* @author djp
*
*/
public static final class Tuple extends Compound {
private Tuple(Node[] nodes) {
super(nodes);
}
public java.util.List<Type> elements() {
int[] values = (int[]) nodes[0].data;
ArrayList<Type> elems = new ArrayList<Type>();
for(Integer i : values) {
elems.add(extract(i));
}
return elems;
}
}
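	// Usage sketch (illustrative; a factory for tuple types is assumed to exist
	// elsewhere in this class): elements() re-extracts each component as a
	// standalone Type by walking the shared node graph.
	//
	//   Type.Tuple pair = ...;                      // say, the tuple type (int,bool)
	//   java.util.List<Type> elems = pair.elements();
	//   // elems.get(0) is the int type and elems.get(1) is the bool type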
/**
* A set type describes set values whose elements are subtypes of the
* element type. For example, <code>{1,2,3}</code> is an instance of set
* type <code>{int}</code>; however, <code>{1.345}</code> is not.
*
* @author djp
*
*/
public static final class Set extends Compound {
private Set(Node[] nodes) {
super(nodes);
}
public Type element() {
return extract(1);
}
}
/**
* A list type describes list values whose elements are subtypes of the
* element type. For example, <code>[1,2,3]</code> is an instance of list
* type <code>[int]</code>; however, <code>[1.345]</code> is not.
*
* @author djp
*
*/
public static final class List extends Compound {
private List(Node[] nodes) {
super(nodes);
}
public Type element() {
return extract(1);
}
}
/**
* A process represents a reference to an actor in Whiley.
*
* @author djp
*
*/
public static final class Process extends Compound {
private Process(Node[] nodes) {
super(nodes);
}
public Type element() {
int i = (Integer) nodes[0].data;
return extract(i);
}
}
/**
	 * A dictionary represents a mapping from values of one type (the key type)
	 * to values of another type (the value type). For example, the dictionary type
* <code>int->real</code> represents a map from integers to real values. A
* valid instance of this type might be <code>{1->1.2,2->3}</code>.
*
* @author djp
*
*/
public static final class Dictionary extends Compound {
private Dictionary(Node[] nodes) {
super(nodes);
}
public Type key() {
Pair<Integer,Integer> p = (Pair) nodes[0].data;
return extract(p.first());
}
public Type value() {
Pair<Integer,Integer> p = (Pair) nodes[0].data;
return extract(p.second());
}
}
/**
* A record is made up of a number of fields, each of which has a unique
* name. Each field has a corresponding type. One can think of a record as a
* special kind of "fixed" dictionary (i.e. where we know exactly which
* entries we have).
*
* @author djp
*
*/
public static final class Record extends Compound {
private Record(Node[] nodes) {
super(nodes);
}
/**
		 * Extract just the key set of this record. This is a cheaper operation
		 * than <code>fields()</code>, since the field types do not need to be
		 * extracted.
*
* @return
*/
public HashSet<String> keys() {
Pair<String,Integer>[] fields = (Pair[]) nodes[0].data;
HashSet<String> r = new HashSet<String>();
for(Pair<String,Integer> f : fields) {
r.add(f.first());
}
return r;
}
/**
* Return a mapping from field names to their types.
*
* @return
*/
public HashMap<String,Type> fields() {
Pair<String,Integer>[] fields = (Pair[]) nodes[0].data;
HashMap<String,Type> r = new HashMap<String,Type>();
for(Pair<String,Integer> f : fields) {
r.put(f.first(),extract(f.second()));
}
return r;
}
}
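	// Usage sketch, using the T_RECORD factory also used in linkedList() below:
	//
	//   HashMap<String,Type> fs = new HashMap<String,Type>();
	//   fs.put("x", T_INT);
	//   fs.put("y", T_BOOL);
	//   Type.Record point = T_RECORD(fs);
	//   point.keys();    // {"x", "y"}
	//   point.fields();  // {"x" -> int, "y" -> bool}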
/**
* A union type represents a type whose variables may hold values from any
* of its "bounds". For example, the union type null|int indicates a
	 * variable can hold either an integer value or null. <b>NOTE:</b> There
* must be at least two bounds for a union type to make sense.
*
* @author djp
*
*/
public static final class Union extends Compound {
private Union(Node[] nodes) {
super(nodes);
}
/**
* Return the bounds of this union type.
*
* @return
*/
public HashSet<Type> bounds() {
HashSet<Type> r = new HashSet<Type>();
			// FIXME: this is a bit of a kludge. The essential idea is to
			// flatten unions, so we never see a union of unions. This is
			// helpful for simplifying various algorithms which use them.
Stack<Union> stack = new Stack<Union>();
stack.add(this);
while(!stack.isEmpty()) {
Union u = stack.pop();
int[] fields = (int[]) u.nodes[0].data;
for(int i : fields) {
Type b = u.extract(i);
if(b instanceof Union) {
stack.add((Union)b);
} else {
r.add(b);
}
}
}
return r;
}
}
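	// Usage sketch: bounds() flattens nested unions, so no Union ever appears
	// inside the returned set. Using the T_UNION and T_NULL factories seen in
	// linkedList() below (and assuming T_UNION does not itself simplify away the
	// nesting before bounds() is called):
	//
	//   Type u = T_UNION(T_NULL, T_UNION(T_INT, T_BOOL));
	//   ((Type.Union) u).bounds();   // {null, int, bool}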
/**
* A function type, consisting of a list of zero or more parameters and a
* return type.
*
* @author djp
*
*/
public static final class Fun extends Compound {
Fun(Node[] nodes) {
super(nodes);
}
/**
* Get the return type of this function type.
*
* @return
*/
public Type ret() {
int[] fields = (int[]) nodes[0].data;
return extract(fields[1]);
}
/**
		 * Get the receiver type of this function type, or <code>null</code> if there is none.
*
* @return
*/
public Type receiver() {
int[] fields = (int[]) nodes[0].data;
int r = fields[0];
if(r == -1) { return null; }
return extract(r);
}
/**
* Get the parameter types of this function type.
*
* @return
*/
public ArrayList<Type> params() {
int[] fields = (int[]) nodes[0].data;
ArrayList<Type> r = new ArrayList<Type>();
for(int i=2;i<fields.length;++i) {
r.add(extract(fields[i]));
}
return r;
}
}
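	// Layout note (derived from receiver()/ret()/params() above): for a K_FUNCTION
	// node the int[] data is laid out as
	//
	//   data[0]  = receiver node index, or -1 if there is no receiver
	//   data[1]  = return type node index
	//   data[2+] = parameter type node indices
	//
	// For example, a receiver-less function type int(int,bool) could be encoded as
	//   nodes[0] = K_FUNCTION : [-1, 1, 1, 2], nodes[1] = K_INT, nodes[2] = K_BOOL.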
// Components
private static final byte K_VOID = 0;
private static final byte K_ANY = 1;
private static final byte K_META = 2;
private static final byte K_NULL = 3;
private static final byte K_BOOL = 4;
private static final byte K_INT = 5;
private static final byte K_RATIONAL = 6;
private static final byte K_TUPLE = 7;
private static final byte K_SET = 8;
private static final byte K_LIST = 9;
private static final byte K_DICTIONARY = 10;
private static final byte K_PROCESS = 11;
private static final byte K_RECORD = 12;
private static final byte K_UNION = 13;
private static final byte K_FUNCTION = 14;
private static final byte K_EXISTENTIAL = 15;
private static final byte K_LABEL = 16;
/**
* Represents a node in the type graph. Each node has a kind, along with a
	 * data value identifying any children. For set, list and process kinds the
	 * data value is an Integer; for records it is a Pair<String,Integer>[]
	 * (sorted by key); for dictionaries it is a Pair<Integer,Integer>; and for
	 * tuples, unions and functions it is an int[] (for functions, element 0 is
	 * the receiver index, or -1 if there is no receiver, and element 1 is the
	 * return type index).
*
* @author djp
*
*/
private static final class Node {
final byte kind;
final Object data;
public Node(byte kind, Object data) {
this.kind = kind;
this.data = data;
}
public boolean equals(final Object o) {
if(o instanceof Node) {
Node c = (Node) o;
if(kind == c.kind) {
switch(kind) {
case K_VOID:
case K_ANY:
case K_META:
case K_NULL:
case K_BOOL:
case K_INT:
case K_RATIONAL:
return true;
case K_SET:
case K_LIST:
case K_PROCESS:
case K_EXISTENTIAL:
case K_DICTIONARY:
return data.equals(c.data);
case K_TUPLE:
case K_FUNCTION:
case K_UNION:
return Arrays.equals((int[])data, (int[])c.data);
case K_RECORD:
return Arrays.equals((Pair[])data, (Pair[])c.data);
}
}
}
return false;
}
public int hashCode() {
if(data == null) {
return kind;
} else {
return kind + data.hashCode();
}
}
		public final static String[] kinds = { "void", "any", "meta", "null",
				"bool", "int", "rat", "tuple", "set", "list", "dict", "ref",
				"record", "union", "fun", "existential", "label" };
public String toString() {
if(data instanceof Pair[]) {
return kinds[kind] + " : " + Arrays.toString((Pair[])data);
} else if(data instanceof int[]) {
return kinds[kind] + " : " + Arrays.toString((int[])data);
} else {
return kinds[kind] + " : " + data;
}
}
}
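	// Encoding sketch: the list type [int] corresponds to the two-node graph
	//
	//   nodes[0] = new Node(K_LIST, 1);    // root: a list whose element is node 1
	//   nodes[1] = new Node(K_INT, null);  // the element type
	//
	// and construct(nodes) on that array yields a Type.List whose element() is int.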
private static final Node[] nodes(Type t) {
if (t instanceof Leaf) {
return new Node[]{new Node(leafKind((Leaf) t), null)};
} else {
// compound type
return ((Compound)t).nodes;
}
}
private static final byte leafKind(Leaf leaf) {
if(leaf instanceof Void) {
return K_VOID;
} else if(leaf instanceof Any) {
return K_ANY;
} else if(leaf instanceof Null) {
return K_NULL;
} else if(leaf instanceof Bool) {
return K_BOOL;
} else if(leaf instanceof Int) {
return K_INT;
} else if(leaf instanceof Real) {
return K_RATIONAL;
} else if(leaf instanceof Meta) {
return K_META;
} else {
// should be dead code
throw new IllegalArgumentException("Invalid leaf node: " + leaf);
}
}
/**
* This method inserts a blank node at the head of the nodes
* array, whilst remapping all existing nodes appropriately.
*
* @param nodes
* @return
*/
private static Node[] insertComponent(Node[] nodes) {
Node[] newnodes = new Node[nodes.length+1];
int[] rmap = new int[nodes.length];
for(int i=0;i!=nodes.length;++i) {
rmap[i] = i+1;
}
for(int i=0;i!=nodes.length;++i) {
newnodes[i+1] = remap(nodes[i],rmap);
}
return newnodes;
}
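	// Worked example (illustrative): inserting the blank head node shifts every
	// existing index up by one. Given nodes = [ K_LIST:1, K_INT ], the result is
	//
	//   [ null, K_LIST:2, K_INT ]
	//
	// leaving slot 0 free for the caller to fill with a new root that references
	// the shifted children.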
/**
	 * This method inserts the nodes in
	 * <code>from</code> into those in <code>into</code> at the given index.
* This method remaps nodes in <code>from</code>, but does not remap
* any in <code>into</code>
*
* @param start
* @param from
* @param into
* @return
*/
private static Node[] insertNodes(int start, Node[] from, Node[] into) {
int[] rmap = new int[from.length];
for(int i=0;i!=from.length;++i) {
rmap[i] = i+start;
}
for(int i=0;i!=from.length;++i) {
into[i+start] = remap(from[i],rmap);
}
return into;
}
private static Node remap(Node node, int[] rmap) {
Object data;
switch (node.kind) {
case K_SET:
case K_LIST:
case K_PROCESS:
// unary nodes
int element = (Integer) node.data;
data = rmap[element];
break;
case K_DICTIONARY:
// binary node
Pair<Integer, Integer> p = (Pair<Integer, Integer>) node.data;
data = new Pair(rmap[p.first()], rmap[p.second()]);
break;
case K_TUPLE:
case K_UNION:
case K_FUNCTION:
// nary node
int[] bounds = (int[]) node.data;
int[] nbounds = new int[bounds.length];
for (int i = 0; i != bounds.length; ++i) {
if(bounds[i] == -1) {
nbounds[i] = -1; // possible with K_FUNCTION
} else {
nbounds[i] = rmap[bounds[i]];
}
}
data = nbounds;
break;
case K_RECORD:
// labeled nary node
Pair<String, Integer>[] fields = (Pair<String, Integer>[]) node.data;
Pair<String, Integer>[] nfields = new Pair[fields.length];
for (int i = 0; i != fields.length; ++i) {
Pair<String, Integer> field = fields[i];
nfields[i] = new Pair(field.first(), rmap[field.second()]);
}
data = nfields;
break;
default:
return node;
}
return new Node(node.kind, data);
}
/**
* The construct methods constructs a Type from an array of Components.
* It carefully ensures the kind of the root node matches the class
* created (e.g. a kind K_SET results in a class Set).
*
* @param nodes
* @return
*/
private final static Type construct(Node[] nodes) {
Node root = nodes[0];
switch(root.kind) {
case K_VOID:
return T_VOID;
case K_ANY:
return T_ANY;
case K_META:
return T_META;
case K_NULL:
return T_NULL;
case K_BOOL:
return T_BOOL;
case K_INT:
return T_INT;
case K_RATIONAL:
return T_REAL;
case K_TUPLE:
return new Tuple(nodes);
case K_SET:
return new Set(nodes);
case K_LIST:
return new List(nodes);
case K_EXISTENTIAL:
if(root.data == null) {
throw new RuntimeException("Problem");
}
return new Existential((NameID) root.data);
case K_PROCESS:
return new Process(nodes);
case K_DICTIONARY:
return new Dictionary(nodes);
case K_RECORD:
return new Record(nodes);
case K_UNION:
return new Union(nodes);
case K_FUNCTION:
return new Fun(nodes);
default:
throw new IllegalArgumentException("invalid node kind: " + root.kind);
}
}
public static void main(String[] args) {
PrintBuilder printer = new PrintBuilder(System.out);
Type t1 = linkedList();
System.out.println("GOT: " + t1);
System.out.println("MIN: " + minimise(t1));
}
public static Type linkedList() {
Type leaf = T_LABEL("X");
HashMap<String,Type> fields = new HashMap<String,Type>();
fields.put("next", T_UNION(T_NULL,leaf));
fields.put("data", T_BOOL);
Type.Record rec = T_RECORD(fields);
return T_RECURSIVE("X",rec);
}
}
|
package org.mozartoz.truffle.translator;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.PrintStream;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Pattern;
import org.mozartoz.truffle.nodes.DerefIfBoundNode;
import org.mozartoz.truffle.nodes.DerefIfBoundNodeGen;
import org.mozartoz.truffle.nodes.DerefNode;
import org.mozartoz.truffle.nodes.DerefNodeGen;
import org.mozartoz.truffle.nodes.ExecuteValuesNode;
import org.mozartoz.truffle.nodes.OzNode;
import org.mozartoz.truffle.nodes.OzRootNode;
import org.mozartoz.truffle.nodes.builtins.BuiltinsManager;
import org.mozartoz.truffle.nodes.builtins.ExceptionBuiltinsFactory.FailNodeFactory.FailNodeGen;
import org.mozartoz.truffle.nodes.builtins.ExceptionBuiltinsFactory.RaiseNodeFactory.RaiseNodeGen;
import org.mozartoz.truffle.nodes.builtins.FloatBuiltinsFactory.FloatDivNodeFactory.FloatDivNodeGen;
import org.mozartoz.truffle.nodes.builtins.IntBuiltinsFactory.DivNodeFactory.DivNodeGen;
import org.mozartoz.truffle.nodes.builtins.IntBuiltinsFactory.ModNodeFactory.ModNodeGen;
import org.mozartoz.truffle.nodes.builtins.ListBuiltinsFactory.HeadNodeGen;
import org.mozartoz.truffle.nodes.builtins.ListBuiltinsFactory.TailNodeGen;
import org.mozartoz.truffle.nodes.builtins.NumberBuiltinsFactory.AddNodeFactory.AddNodeGen;
import org.mozartoz.truffle.nodes.builtins.NumberBuiltinsFactory.MulNodeFactory.MulNodeGen;
import org.mozartoz.truffle.nodes.builtins.NumberBuiltinsFactory.SubNodeFactory.SubNodeGen;
import org.mozartoz.truffle.nodes.builtins.ValueBuiltinsFactory.CatExchangeNodeFactory.CatExchangeNodeGen;
import org.mozartoz.truffle.nodes.builtins.ValueBuiltinsFactory.DotNodeFactory.DotNodeGen;
import org.mozartoz.truffle.nodes.builtins.ValueBuiltinsFactory.EqualNodeFactory.EqualNodeGen;
import org.mozartoz.truffle.nodes.builtins.ValueBuiltinsFactory.GreaterThanNodeFactory.GreaterThanNodeGen;
import org.mozartoz.truffle.nodes.builtins.ValueBuiltinsFactory.GreaterThanOrEqualNodeFactory.GreaterThanOrEqualNodeGen;
import org.mozartoz.truffle.nodes.builtins.ValueBuiltinsFactory.LesserThanNodeFactory.LesserThanNodeGen;
import org.mozartoz.truffle.nodes.builtins.ValueBuiltinsFactory.LesserThanOrEqualNodeFactory.LesserThanOrEqualNodeGen;
import org.mozartoz.truffle.nodes.builtins.ValueBuiltinsFactory.NotEqualNodeFactory.NotEqualNodeGen;
import org.mozartoz.truffle.nodes.call.CallMethodNodeGen;
import org.mozartoz.truffle.nodes.call.CallNodeGen;
import org.mozartoz.truffle.nodes.call.CallProcNodeGen;
import org.mozartoz.truffle.nodes.call.ReadArgumentNode;
import org.mozartoz.truffle.nodes.call.SelfTailCallCatcherNode;
import org.mozartoz.truffle.nodes.call.SelfTailCallThrowerNode;
import org.mozartoz.truffle.nodes.call.TailCallCatcherNode;
import org.mozartoz.truffle.nodes.call.TailCallThrowerNode;
import org.mozartoz.truffle.nodes.control.AndNode;
import org.mozartoz.truffle.nodes.control.AndThenNode;
import org.mozartoz.truffle.nodes.control.IfNode;
import org.mozartoz.truffle.nodes.control.NoElseNode;
import org.mozartoz.truffle.nodes.control.OrElseNode;
import org.mozartoz.truffle.nodes.control.SequenceNode;
import org.mozartoz.truffle.nodes.control.SkipNode;
import org.mozartoz.truffle.nodes.control.TryNode;
import org.mozartoz.truffle.nodes.literal.BooleanLiteralNode;
import org.mozartoz.truffle.nodes.literal.ConsLiteralNodeGen;
import org.mozartoz.truffle.nodes.literal.ListLiteralNode;
import org.mozartoz.truffle.nodes.literal.LiteralNode;
import org.mozartoz.truffle.nodes.literal.LongLiteralNode;
import org.mozartoz.truffle.nodes.literal.MakeDynamicRecordNode;
import org.mozartoz.truffle.nodes.literal.ProcDeclarationAndExtractionNode;
import org.mozartoz.truffle.nodes.literal.ProcDeclarationNode;
import org.mozartoz.truffle.nodes.literal.RecordLiteralNode;
import org.mozartoz.truffle.nodes.literal.UnboundLiteralNode;
import org.mozartoz.truffle.nodes.local.BindNodeGen;
import org.mozartoz.truffle.nodes.local.CopyVariableToFrameNode;
import org.mozartoz.truffle.nodes.local.CopyVariableToFrameNodeGen;
import org.mozartoz.truffle.nodes.local.FrameSlotNode;
import org.mozartoz.truffle.nodes.local.InitializeArgNode;
import org.mozartoz.truffle.nodes.local.InitializeTmpNode;
import org.mozartoz.truffle.nodes.local.InitializeVarNode;
import org.mozartoz.truffle.nodes.local.ReadCapturedVariableNodeGen;
import org.mozartoz.truffle.nodes.local.ReadFrameSlotNodeGen;
import org.mozartoz.truffle.nodes.local.ReadLocalVariableNode;
import org.mozartoz.truffle.nodes.local.ResetSlotsNode;
import org.mozartoz.truffle.nodes.local.WriteCapturedVariableNode;
import org.mozartoz.truffle.nodes.local.WriteFrameSlotNodeGen;
import org.mozartoz.truffle.nodes.pattern.PatternMatchConsNodeGen;
import org.mozartoz.truffle.nodes.pattern.PatternMatchEqualNodeGen;
import org.mozartoz.truffle.nodes.pattern.PatternMatchIdentityNodeGen;
import org.mozartoz.truffle.nodes.pattern.PatternMatchOpenRecordNodeGen;
import org.mozartoz.truffle.nodes.pattern.PatternMatchRecordNodeGen;
import org.mozartoz.truffle.runtime.Arity;
import org.mozartoz.truffle.runtime.ArrayUtils;
import org.mozartoz.truffle.runtime.OzCell;
import org.mozartoz.truffle.runtime.OzChunk;
import org.mozartoz.truffle.runtime.OzCons;
import org.mozartoz.truffle.runtime.OzDict;
import org.mozartoz.truffle.runtime.OzLanguage;
import org.mozartoz.truffle.runtime.OzName;
import org.mozartoz.truffle.runtime.OzObject;
import org.mozartoz.truffle.runtime.OzProc;
import org.mozartoz.truffle.runtime.OzUniqueName;
import org.mozartoz.truffle.runtime.OzVar;
import org.mozartoz.truffle.runtime.Unit;
import org.objenesis.strategy.StdInstantiatorStrategy;
import com.esotericsoftware.kryo.Kryo;
import com.esotericsoftware.kryo.Kryo.DefaultInstantiatorStrategy;
import com.esotericsoftware.kryo.Serializer;
import com.esotericsoftware.kryo.io.Input;
import com.esotericsoftware.kryo.io.Output;
import com.esotericsoftware.kryo.serializers.FieldSerializer;
import com.oracle.truffle.api.RootCallTarget;
import com.oracle.truffle.api.Truffle;
import com.oracle.truffle.api.TruffleLanguage.Env;
import com.oracle.truffle.api.TruffleRuntime;
import com.oracle.truffle.api.dsl.GeneratedBy;
import com.oracle.truffle.api.dsl.NodeFactory;
import com.oracle.truffle.api.frame.Frame;
import com.oracle.truffle.api.frame.FrameDescriptor;
import com.oracle.truffle.api.frame.FrameSlot;
import com.oracle.truffle.api.frame.MaterializedFrame;
import com.oracle.truffle.api.nodes.Node;
import com.oracle.truffle.api.nodes.Node.Child;
import com.oracle.truffle.api.nodes.NodeUtil;
import com.oracle.truffle.api.nodes.RootNode;
import com.oracle.truffle.api.object.DynamicObject;
import com.oracle.truffle.api.object.Property;
import com.oracle.truffle.api.object.Shape;
import com.oracle.truffle.api.source.Source;
import com.oracle.truffle.api.source.SourceSection;
public class OzSerializer implements AutoCloseable {
private static class StringSerializer extends Serializer<String> {
public void write(Kryo kryo, Output output, String str) {
output.writeString(str);
}
public String read(Kryo kryo, Input input, Class<String> type) {
return input.readString().intern();
}
}
private static class OzUniqueNameSerializer extends Serializer<OzUniqueName> {
public void write(Kryo kryo, Output output, OzUniqueName uniqueName) {
output.writeString(uniqueName.getName());
}
public OzUniqueName read(Kryo kryo, Input input, Class<OzUniqueName> type) {
String name = input.readString().intern();
return OzUniqueName.get(name);
}
}
private static class ShapeSerializer extends Serializer<Shape> {
public void write(Kryo kryo, Output output, Shape shape) {
List<Property> propertyList = shape.getPropertyListInternal(true);
Property firstProperty = propertyList.get(0);
if (firstProperty != Arity.LABEL_PROPERTY) {
throw new Error("first property was " + firstProperty);
}
int n = propertyList.size() - 1;
output.writeInt(n);
for (int i = 1; i < propertyList.size(); i++) {
Property property = propertyList.get(i);
Object key = property.getKey();
kryo.writeClassAndObject(output, key);
}
}
public Shape read(Kryo kryo, Input input, Class<Shape> type) {
int n = input.readInt();
Object[] features = new Object[n];
for (int i = 0; i < n; i++) {
features[i] = kryo.readClassAndObject(input);
}
return Arity.build("deserialize", features).getShape();
}
}
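	// Format sketch: a Shape is written as the number of feature properties
	// followed by each feature key; the leading Arity.LABEL_PROPERTY is skipped on
	// write and a fresh label ("deserialize") is supplied by Arity.build on read.
	// For example, a record arity with the two string features "a" and "b" would be
	// written roughly as:
	//
	//   writeInt(2); writeClassAndObject("a"); writeClassAndObject("b");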
private static class DynamicObjectSerializer extends Serializer<DynamicObject> {
public void write(Kryo kryo, Output output, DynamicObject object) {
Shape shape = object.getShape();
kryo.writeObject(output, shape);
List<Property> propertyList = shape.getPropertyListInternal(true);
output.writeInt(propertyList.size());
for (Property property : propertyList) {
Object value = property.get(object, shape);
kryo.writeClassAndObject(output, value);
}
}
public DynamicObject read(Kryo kryo, Input input, Class<DynamicObject> type) {
Shape shape = (Shape) kryo.readObject(input, SHAPE);
DynamicObject dynamicObject = shape.newInstance();
kryo.reference(dynamicObject);
int n = input.readInt();
List<Property> propertyList = shape.getPropertyListInternal(true);
assert propertyList.get(0) == Arity.LABEL_PROPERTY;
assert n == propertyList.size();
for (int i = 0; i < n; i++) {
Property property = propertyList.get(i);
Object value = kryo.readClassAndObject(input);
property.setInternal(dynamicObject, value);
}
return dynamicObject;
}
}
private static class OzProcSerializer extends Serializer<OzProc> {
public void write(Kryo kryo, Output output, OzProc proc) {
RootNode rootNode = proc.callTarget.getRootNode();
SourceSection section = rootNode.getSourceSection();
boolean isBuiltin = section.getSource().isInternal();
output.writeBoolean(isBuiltin);
if (isBuiltin) {
output.writeString(rootNode.getName());
} else {
output.writeInt(proc.arity);
kryo.writeObject(output, proc.callTarget);
kryo.writeObject(output, proc.declarationFrame);
}
}
public OzProc read(Kryo kryo, Input input, Class<OzProc> type) {
boolean isBuiltin = input.readBoolean();
final OzProc ozProc;
if (isBuiltin) {
String identifier = input.readString();
ozProc = BuiltinsManager.getBuiltin(identifier);
} else {
int arity = input.readInt();
ozProc = new OzProc(null, null, arity);
kryo.reference(ozProc);
ozProc.callTarget = kryo.readObject(input, ROOT_CALL_TARGET);
ozProc.declarationFrame = kryo.readObject(input, MATERIALIZED_FRAME);
}
return ozProc;
}
}
private static class RootCallTargetSerializer extends Serializer<RootCallTarget> {
public void write(Kryo kryo, Output output, RootCallTarget callTarget) {
kryo.writeObject(output, callTarget.getRootNode());
}
public RootCallTarget read(Kryo kryo, Input input, Class<RootCallTarget> type) {
OzRootNode rootNode = kryo.readObject(input, OzRootNode.class);
RootCallTarget callTarget = rootNode.toCallTarget();
assert NodeUtil.verify(rootNode);
return callTarget;
}
}
private static class OzRootNodeSerializer extends Serializer<OzRootNode> {
private final OzLanguage language;
public OzRootNodeSerializer(OzLanguage language) {
this.language = language;
}
public void write(Kryo kryo, Output output, OzRootNode rootNode) {
kryo.writeClassAndObject(output, rootNode.getSourceSection());
output.writeString(rootNode.getName());
kryo.writeObject(output, rootNode.getFrameDescriptor());
kryo.writeClassAndObject(output, rootNode.getBody());
output.writeInt(rootNode.getArity());
output.writeBoolean(rootNode.isForceSplitting());
}
public OzRootNode read(Kryo kryo, Input input, Class<OzRootNode> type) {
SourceSection sourceSection = (SourceSection) kryo.readClassAndObject(input);
String name = input.readString();
FrameDescriptor frameDescriptor = kryo.readObject(input, FrameDescriptor.class);
OzNode body = (OzNode) kryo.readClassAndObject(input);
int arity = input.readInt();
boolean forceSplitting = input.readBoolean();
return new OzRootNode(language, sourceSection, name, frameDescriptor, body, arity, forceSplitting);
}
}
private static class FrameSlotSerializer extends Serializer<FrameSlot> {
@SuppressWarnings("deprecation")
public void write(Kryo kryo, Output output, FrameSlot frameSlot) {
kryo.writeObject(output, frameSlot.getFrameDescriptor());
output.writeString((String) frameSlot.getIdentifier());
}
public FrameSlot read(Kryo kryo, Input input, Class<FrameSlot> type) {
FrameDescriptor frameDescriptor = kryo.readObject(input, FrameDescriptor.class);
String identifier = input.readString().intern();
return frameDescriptor.findOrAddFrameSlot(identifier);
}
}
private static class FrameDescriptorSerializer extends Serializer<FrameDescriptor> {
public void write(Kryo kryo, Output output, FrameDescriptor frameDescriptor) {
assert frameDescriptor.getDefaultValue() == null;
List<? extends FrameSlot> slots = frameDescriptor.getSlots();
output.writeInt(slots.size());
for (FrameSlot frameSlot : slots) {
String identifier = (String) frameSlot.getIdentifier();
output.writeString(identifier);
}
}
public FrameDescriptor read(Kryo kryo, Input input, Class<FrameDescriptor> type) {
FrameDescriptor frameDescriptor = new FrameDescriptor(null);
kryo.reference(frameDescriptor);
int n = input.readInt();
for (int i = 0; i < n; i++) {
String identifier = input.readString().intern();
frameDescriptor.addFrameSlot(identifier);
}
return frameDescriptor;
}
}
private static class FrameSerializer extends Serializer<Frame> {
static final Object[] FAKE_ARGUMENTS = ArrayUtils.EMPTY;
Field argumentsField;
Field wrappedField;
public FrameSerializer() {
try {
argumentsField = MATERIALIZED_FRAME.getDeclaredField("arguments");
argumentsField.setAccessible(true);
} catch (NoSuchFieldException nsfe) {
try {
wrappedField = MATERIALIZED_FRAME.getDeclaredField("wrapped");
wrappedField.setAccessible(true);
argumentsField = wrappedField.get(FRAME).getClass().getDeclaredField("arguments");
argumentsField.setAccessible(true);
} catch (ReflectiveOperationException e) {
throw new Error(e);
}
}
}
public void write(Kryo kryo, Output output, Frame frame) {
FrameDescriptor frameDescriptor = frame.getFrameDescriptor();
kryo.writeObject(output, frameDescriptor);
kryo.writeObject(output, frame.getArguments());
for (FrameSlot slot : frameDescriptor.getSlots()) {
Object value = frame.getValue(slot);
kryo.writeClassAndObject(output, value);
}
}
public Frame read(Kryo kryo, Input input, Class<Frame> type) {
FrameDescriptor frameDescriptor = kryo.readObject(input, FrameDescriptor.class);
Frame frame = TRUFFLE.createMaterializedFrame(FAKE_ARGUMENTS, frameDescriptor);
kryo.reference(frame);
setFrameArguments(frame, kryo.readObject(input, Object[].class));
for (FrameSlot slot : frameDescriptor.getSlots()) {
Object value = kryo.readClassAndObject(input);
frame.setObject(slot, value);
}
return frame;
}
private void setFrameArguments(Frame frame, Object[] arguments) {
try {
if (wrappedField == null) {
argumentsField.set(frame, arguments);
} else {
argumentsField.set(wrappedField.get(frame), arguments);
}
} catch (ReflectiveOperationException e) {
throw new Error(e);
}
}
}
private static class OzVarSerializer extends Serializer<OzVar> {
public void write(Kryo kryo, Output output, OzVar var) {
assert var.isBound();
kryo.writeClassAndObject(output, var.getBoundValue(null));
}
public OzVar read(Kryo kryo, Input input, Class<OzVar> type) {
OzVar var = new OzVar();
kryo.reference(var);
Object value = kryo.readClassAndObject(input);
var.bind(value);
return var;
}
}
private static class FileSourceSerializer extends Serializer<Source> {
private final Env env;
public FileSourceSerializer(Env env) {
this.env = env;
}
public void write(Kryo kryo, Output output, Source source) {
assert source.getPath() != null || source == Loader.MAIN_SOURCE;
output.writeString(source.getPath());
}
public Source read(Kryo kryo, Input input, Class<Source> type) {
String path = input.readString();
if (path == null) {
return Loader.MAIN_SOURCE;
} else {
return Loader.createSource(env, path);
}
}
}
private static class SourceSectionSerializer extends Serializer<SourceSection> {
private final Class<? extends Source> sourceClass;
public SourceSectionSerializer(Class<? extends Source> sourceClass) {
this.sourceClass = sourceClass;
}
public void write(Kryo kryo, Output output, SourceSection section) {
assert section.isAvailable();
kryo.writeObject(output, section.getSource());
output.writeInt(section.getCharIndex());
output.writeInt(section.getCharLength());
}
public SourceSection read(Kryo kryo, Input input, Class<SourceSection> type) {
Source source = kryo.readObject(input, sourceClass);
int charIndex = input.readInt();
int charLength = input.readInt();
return source.createSection(charIndex, charLength);
}
}
private static Object underef(Object value) {
if (value instanceof DerefNode) {
return ((DerefNode) value).getValue();
} else if (value instanceof DerefIfBoundNode) {
return ((DerefIfBoundNode) value).getValue();
} else {
return value;
}
}
private static class CopyVariableToFrameNodeSerializer extends Serializer<CopyVariableToFrameNode> {
public void write(Kryo kryo, Output output, CopyVariableToFrameNode node) {
kryo.writeClassAndObject(output, node.getReadNode());
kryo.writeObject(output, node.slot);
}
public CopyVariableToFrameNode read(Kryo kryo, Input input, Class<CopyVariableToFrameNode> type) {
OzNode readNode = (OzNode) kryo.readClassAndObject(input);
FrameSlot slot = kryo.readObject(input, FrameSlot.class);
return CopyVariableToFrameNode.create(readNode, slot);
}
}
private static class NodeSerializer extends Serializer<Node> {
private final Field[] fields;
private final Constructor<?> constructor;
public NodeSerializer(Class<? extends Node> klass) {
Constructor<?>[] constructors = klass.getDeclaredConstructors();
assert constructors.length == 1 : klass;
constructor = constructors[0];
constructor.setAccessible(true);
Class<?>[] parameterTypes = constructor.getParameterTypes();
List<Field> toSave = new ArrayList<>();
int i = 0;
for (Field field : klass.getDeclaredFields()) {
Class<?> type = field.getType();
if (type == parameterTypes[i]) {
field.setAccessible(true);
toSave.add(field);
i++;
} else if (parameterTypes[i] == FrameSlot.class && FrameSlotNode.class.isAssignableFrom(klass)) {
toSave.add(null);
i++;
}
if (i == parameterTypes.length) {
break;
}
}
fields = toSave.toArray(new Field[toSave.size()]);
assert i == parameterTypes.length;
}
public void write(Kryo kryo, Output output, Node node) {
try {
for (Field field : fields) {
Object value;
if (field == null) {
value = ((FrameSlotNode) node).getSlot();
} else {
value = field.get(node);
if (value.getClass() == OzNode[].class) {
OzNode[] nodes = (OzNode[]) value;
OzNode[] values = new OzNode[nodes.length];
for (int i = 0; i < nodes.length; i++) {
values[i] = (OzNode) underef(nodes[i]);
}
value = values;
} else {
value = underef(value);
}
}
kryo.writeClassAndObject(output, value);
}
kryo.writeClassAndObject(output, node.getSourceSection());
} catch (ReflectiveOperationException e) {
throw new Error(e);
}
}
public Node read(Kryo kryo, Input input, Class<Node> type) {
Object[] values = new Object[fields.length];
for (int i = 0; i < fields.length; i++) {
values[i] = kryo.readClassAndObject(input);
}
SourceSection sourceSection = (SourceSection) kryo.readClassAndObject(input);
try {
Node node = (Node) constructor.newInstance(values);
if (node instanceof OzNode && node.getSourceSection() == null) {
((OzNode) node).setSourceSection(sourceSection);
}
return node;
} catch (ReflectiveOperationException e) {
throw new Error(e);
}
}
}
private static class DSLNodeSerializer extends Serializer<Node> {
private final Field[] fields;
private final Method constructor;
private final boolean removeDeref;
private static final Pattern CACHE_FIELD_PATTERN = Pattern.compile("_.+_");
public DSLNodeSerializer(Class<? extends Node> genClass) {
Class<?> baseClass = genClass.getAnnotation(GeneratedBy.class).value();
List<Field> toSave = new ArrayList<>();
for (Field field : baseClass.getDeclaredFields()) {
if (!Modifier.isStatic(field.getModifiers())) {
if (!field.isAnnotationPresent(Child.class)) { // most likely a helper node
toSave.add(field);
}
}
}
for (Field field : genClass.getDeclaredFields()) {
String name = field.getName();
if (!name.equals("state_")
&& !name.startsWith("exclude_")
&& !name.endsWith("_cache")
&& !name.startsWith("$")
&& !CACHE_FIELD_PATTERN.matcher(name).find()) {
toSave.add(field);
}
}
fields = toSave.toArray(new Field[toSave.size()]);
Class<?>[] parameterTypes = new Class[fields.length];
for (int i = 0; i < fields.length; i++) {
fields[i].setAccessible(true);
parameterTypes[i] = fields[i].getType();
}
Class<?> enclosingClass = genClass.getEnclosingClass();
boolean hasFactoryClass = enclosingClass != null && NodeFactory.class.isAssignableFrom(enclosingClass);
final Class<?> factoryClass = hasFactoryClass ? enclosingClass : genClass;
this.removeDeref = !hasFactoryClass;
try {
constructor = factoryClass.getMethod("create", parameterTypes);
} catch (NoSuchMethodException e) {
throw new Error(e);
}
}
public void write(Kryo kryo, Output output, Node node) {
try {
for (Field field : fields) {
Object value = field.get(node);
if (removeDeref) {
value = underef(value);
}
kryo.writeClassAndObject(output, value);
}
} catch (ReflectiveOperationException e) {
throw new Error(e);
}
}
public Node read(Kryo kryo, Input input, Class<Node> type) {
Object[] values = new Object[fields.length];
for (int i = 0; i < fields.length; i++) {
values[i] = kryo.readClassAndObject(input);
}
try {
return (Node) constructor.invoke(null, values);
} catch (ReflectiveOperationException e) {
throw new Error(e);
}
}
}
private static class PrintStreamSerializer extends Serializer<PrintStream> {
public void write(Kryo kryo, Output output, PrintStream stream) {
if (stream == System.out) {
output.writeByte(1);
} else if (stream == System.err) {
output.writeByte(2);
} else {
throw new Error();
}
}
public PrintStream read(Kryo kryo, Input input, Class<PrintStream> type) {
switch (input.readByte()) {
case 1:
return System.out;
case 2:
return System.err;
default:
throw new Error();
}
}
}
private static class InputStreamSerializer extends Serializer<InputStream> {
public void write(Kryo kryo, Output output, InputStream stream) {
if (stream == System.in) {
output.writeByte(0);
} else {
throw new Error();
}
}
public InputStream read(Kryo kryo, Input input, Class<InputStream> type) {
switch (input.readByte()) {
case 0:
return (BufferedInputStream) System.in;
default:
throw new Error();
}
}
}
private static class SingletonSerializer extends Serializer<Object> {
private final Object singleton;
public SingletonSerializer(Object singleton) {
this.singleton = singleton;
}
public void write(Kryo kryo, Output output, Object object) {
}
public Object read(Kryo kryo, Input input, Class<Object> type) {
return singleton;
}
}
private static final TruffleRuntime TRUFFLE = Truffle.getRuntime();
private static final MaterializedFrame FRAME = TRUFFLE.createMaterializedFrame(ArrayUtils.EMPTY);
private static final Class<? extends MaterializedFrame> MATERIALIZED_FRAME = FRAME.getClass();
private static final Class<? extends RootCallTarget> ROOT_CALL_TARGET =
TRUFFLE.createCallTarget(RootNode.createConstantNode(null)).getClass();
private static final Class<? extends Shape> SHAPE = Arity.EMPTY.getClass();
private static final Class<? extends DynamicObject> DYNAMIC_OBJECT = Arity.EMPTY.newInstance().getClass();
private final Kryo kryo;
public OzSerializer(Env env, OzLanguage language) {
kryo = new Kryo();
kryo.setRegistrationRequired(true);
kryo.setReferences(true);
kryo.setInstantiatorStrategy(new DefaultInstantiatorStrategy(new StdInstantiatorStrategy()));
kryo.setDefaultSerializer((k, type) -> {
if (Node.class.isAssignableFrom(type)) {
@SuppressWarnings("unchecked")
Class<? extends Node> nodeClass = (Class<? extends Node>) type;
if (type.getName().endsWith("Gen")) {
return new DSLNodeSerializer(nodeClass);
} else {
return new NodeSerializer(nodeClass);
}
} else {
return new FieldSerializer<>(k, type);
}
});
// atoms
kryo.register(String.class, new StringSerializer());
// procs
kryo.register(SHAPE, new ShapeSerializer());
kryo.register(DYNAMIC_OBJECT, new DynamicObjectSerializer());
kryo.register(OzProc.class, new OzProcSerializer());
kryo.register(ROOT_CALL_TARGET, new RootCallTargetSerializer());
kryo.register(OzRootNode.class, new OzRootNodeSerializer(language));
// nodes
kryo.register(OzNode[].class);
kryo.register(SequenceNode.class);
kryo.register(InitializeArgNode.class);
kryo.register(InitializeVarNode.class);
kryo.register(InitializeTmpNode.class);
kryo.register(ReadArgumentNode.class);
kryo.register(WriteFrameSlotNodeGen.class);
kryo.register(ReadFrameSlotNodeGen.class);
kryo.register(ExecuteValuesNode.class);
kryo.register(CallNodeGen.class);
kryo.register(CallProcNodeGen.class);
kryo.register(CallMethodNodeGen.class);
kryo.register(TailCallCatcherNode.class);
kryo.register(TailCallThrowerNode.class);
kryo.register(SelfTailCallCatcherNode.class);
kryo.register(SelfTailCallThrowerNode.class);
kryo.register(ReadLocalVariableNode.class);
kryo.register(ReadCapturedVariableNodeGen.class);
kryo.register(WriteCapturedVariableNode.class);
kryo.register(CopyVariableToFrameNodeGen.class, new CopyVariableToFrameNodeSerializer());
kryo.register(CopyVariableToFrameNode[].class);
kryo.register(ResetSlotsNode.class);
kryo.register(ProcDeclarationNode.class);
kryo.register(ProcDeclarationAndExtractionNode.class);
kryo.register(ListLiteralNode.class);
kryo.register(RecordLiteralNode.class);
kryo.register(MakeDynamicRecordNode.class);
kryo.register(SkipNode.class);
kryo.register(UnboundLiteralNode.class);
kryo.register(IfNode.class);
kryo.register(TryNode.class);
kryo.register(FailNodeGen.class);
kryo.register(RaiseNodeGen.class);
kryo.register(NoElseNode.class);
kryo.register(AndNode.class);
kryo.register(AndThenNode.class);
kryo.register(OrElseNode.class);
kryo.register(BindNodeGen.class);
kryo.register(PatternMatchEqualNodeGen.class);
kryo.register(PatternMatchIdentityNodeGen.class);
kryo.register(PatternMatchConsNodeGen.class);
kryo.register(PatternMatchRecordNodeGen.class);
kryo.register(PatternMatchOpenRecordNodeGen.class);
kryo.register(DerefNodeGen.class);
kryo.register(DerefIfBoundNodeGen.class);
kryo.register(HeadNodeGen.class);
kryo.register(TailNodeGen.class);
kryo.register(AddNodeGen.class);
kryo.register(SubNodeGen.class);
kryo.register(MulNodeGen.class);
kryo.register(DivNodeGen.class);
kryo.register(FloatDivNodeGen.class);
kryo.register(ModNodeGen.class);
kryo.register(EqualNodeGen.class);
kryo.register(NotEqualNodeGen.class);
kryo.register(LesserThanNodeGen.class);
kryo.register(LesserThanOrEqualNodeGen.class);
kryo.register(GreaterThanNodeGen.class);
kryo.register(GreaterThanOrEqualNodeGen.class);
kryo.register(DotNodeGen.class);
kryo.register(CatExchangeNodeGen.class);
kryo.register(LiteralNode.class);
kryo.register(BooleanLiteralNode.class);
kryo.register(LongLiteralNode.class);
kryo.register(ConsLiteralNodeGen.class);
// sources
Source fileSource = Loader.createSource(env, Loader.INIT_FUNCTOR);
kryo.register(fileSource.getClass(), new FileSourceSerializer(env));
kryo.register(SourceSection.class, new SourceSectionSerializer(fileSource.getClass()));
kryo.register(String[].class);
// frames
kryo.register(FrameSlot.class, new FrameSlotSerializer());
kryo.register(FrameDescriptor.class, new FrameDescriptorSerializer());
kryo.register(MATERIALIZED_FRAME, new FrameSerializer());
kryo.register(FrameSlot[].class);
kryo.register(Object[].class);
kryo.register(long[].class);
kryo.register(byte[].class);
// values
kryo.register(Arity.class);
kryo.register(OzLanguage.class, new SingletonSerializer(language));
kryo.register(Unit.class, new SingletonSerializer(Unit.INSTANCE));
kryo.register(OzVar.class, new OzVarSerializer());
kryo.register(OzName.class);
kryo.register(OzUniqueName.class, new OzUniqueNameSerializer());
kryo.register(OzCons.class);
kryo.register(OzChunk.class);
kryo.register(OzCell.class);
kryo.register(OzDict.class);
kryo.register(OzObject.class);
kryo.register(PrintStream.class, new PrintStreamSerializer());
kryo.register(System.in.getClass(), new InputStreamSerializer());
}
@Override
public void close() {
}
public void serialize(Object object, String path) {
try (Output output = new Output(new FileOutputStream(path))) {
kryo.writeClassAndObject(output, object);
} catch (FileNotFoundException e) {
throw new Error(e);
} catch (Throwable e) {
new File(path).delete();
throw e;
}
}
public <T> T deserialize(String path, Class<T> klass) {
try (Input input = new Input(new FileInputStream(path))) {
Object value = kryo.readClassAndObject(input);
return klass.cast(value);
} catch (IOException e) {
throw new Error(e);
}
}
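/*
 * A minimal usage sketch (hypothetical path and values; env/language come from the Truffle context):
 *
 *   OzSerializer serializer = new OzSerializer(env, language);
 *   serializer.serialize(someProc, "/tmp/functor.kryo");
 *   OzProc restored = serializer.deserialize("/tmp/functor.kryo", OzProc.class);
 */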
}
|
package com.microsoft.graph.functional;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import okhttp3.Request;
import com.microsoft.graph.http.HttpMethod;
import com.microsoft.graph.models.extensions.DirectoryObject;
import com.microsoft.graph.models.extensions.Drive;
import com.microsoft.graph.models.extensions.DriveItem;
import com.microsoft.graph.models.extensions.Group;
import com.microsoft.graph.models.extensions.IGraphServiceClient;
import com.microsoft.graph.models.extensions.ProfilePhoto;
import com.microsoft.graph.models.extensions.User;
import com.microsoft.graph.options.HeaderOption;
import com.microsoft.graph.options.Option;
import com.microsoft.graph.requests.extensions.IContactCollectionPage;
import com.microsoft.graph.requests.extensions.IDirectoryObjectCollectionWithReferencesPage;
import com.microsoft.graph.requests.extensions.IDriveItemCollectionPage;
import com.microsoft.graph.requests.extensions.IGroupCollectionPage;
import com.microsoft.graph.requests.extensions.IMailFolderCollectionPage;
import com.microsoft.graph.requests.extensions.IMessageCollectionPage;
import com.microsoft.graph.requests.extensions.IOrganizationCollectionPage;
import com.microsoft.graph.requests.extensions.IUsedInsightCollectionPage;
import com.microsoft.graph.requests.extensions.IUserCollectionPage;
@Ignore
public class UserTests {
IGraphServiceClient graphServiceClient = null;
@Before
public void setUp() {
TestBase testBase = new TestBase();
graphServiceClient = testBase.graphClient;
}
@Test
public void getMeTest() {
//GET me
User user = graphServiceClient.me().buildRequest().get();
assertNotNull(user);
assertNotNull(user.displayName);
}
@Test
public void getMePhoto() {
//GET me/photo/$value
User user = graphServiceClient.me().buildRequest().get();
assertNotNull(user);
if(user.photo != null) {
InputStream stream = graphServiceClient.me().photo().content().buildRequest().get();
assertNotNull(stream);
}
}
@Test
public void meDriveTest() {
//GET me/drive/root/children
IDriveItemCollectionPage driveItemCollectionPage = graphServiceClient.me().drive().root().children().buildRequest().get();
assertNotNull(driveItemCollectionPage);
}
@Test
public void userKeyTest() {
//GET users('<<key>>')
IUserCollectionPage userCollectionPage = graphServiceClient.users().buildRequest().get();
assertNotNull(userCollectionPage);
assertNotNull(userCollectionPage.additionalDataManager().get("graphResponseHeaders"));
List<User> list = userCollectionPage.getCurrentPage();
if(list.size() > 0) {
User user = graphServiceClient.users(list.get(0).id).buildRequest().get();
assertNotNull(user);
}
}
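/*
 * Paging sketch (not exercised by these tests; assumes the SDK's getNextPage() accessor
 * on collection pages):
 *
 *   IUserCollectionPage page = graphServiceClient.users().buildRequest().get();
 *   while (page != null) {
 *       for (User u : page.getCurrentPage()) {
 *           // process u.displayName ...
 *       }
 *       IUserCollectionRequestBuilder next = page.getNextPage();
 *       page = next == null ? null : next.buildRequest().get();
 *   }
 */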
@Test
public void meDriveRoot() {
//GET me/drive/root
DriveItem driveItem = graphServiceClient.me().drive().root().buildRequest().get();
assertNotNull(driveItem);
}
@Test
public void meDrive() {
//GET me/drive
Drive drive = graphServiceClient.me().drive().buildRequest().get();
assertNotNull(drive);
}
@Test
public void meDriveItems() {
//GET me/drive/items('<key>')
IDriveItemCollectionPage driveItemCollectionPage = graphServiceClient.me().drive().items().buildRequest().get();
assertNotNull(driveItemCollectionPage);
if(driveItemCollectionPage.getCurrentPage().size() > 0) {
DriveItem item = graphServiceClient.me().drive().items(driveItemCollectionPage.getCurrentPage().get(0).id).buildRequest().get();
assertNotNull(item);
}
}
@Test
public void meMessagesTest() {
//GET me/messages
IMessageCollectionPage messageCollectionPage = graphServiceClient.me().messages().buildRequest().get();
assertNotNull(messageCollectionPage);
}
@Test
public void meContactsTest() {
//GET me/contacts
IContactCollectionPage contactCollectionPage = graphServiceClient.me().contacts().buildRequest().get();
assertNotNull(contactCollectionPage);
}
@Test
public void usersKeyPhotoValueTest() {
//GET users('<<key>>')/photo/$value
IUserCollectionPage userCollectionPage = graphServiceClient.users().buildRequest().get();
for(User user:userCollectionPage.getCurrentPage()) {
if(user.photo!=null) {
InputStream stream = graphServiceClient.users(user.id).photo().content().buildRequest().get();
assertNotNull(stream);
break;
}
}
}
@Test
public void updateUserPhotoValueTest() throws Exception {
final File photo = new File("src/test/resources/hamilton.jpg");
final InputStream fileStream = new FileInputStream(photo);
graphServiceClient.me().photo().content().buildRequest().put(OutlookTests.getByteArray(fileStream));
}
@Test
public void getOrganization() {
//GET organization
IOrganizationCollectionPage organizationCollectionPage = graphServiceClient.organization().buildRequest().get();
assertNotNull(organizationCollectionPage);
}
@Test
public void meInsightsUsed() {
//GET me/insights/used
IUsedInsightCollectionPage usedInsightCollectionPage = graphServiceClient.me().insights().used().buildRequest().get();
assertNotNull(usedInsightCollectionPage);
}
@Test
public void mailFoldertest() {
//GET me/mailFolders
IMailFolderCollectionPage mailFolderCollectionPage = graphServiceClient.me().mailFolders().buildRequest().get();
assertNotNull(mailFolderCollectionPage);
if(mailFolderCollectionPage.getCurrentPage().size() > 0) {
String mailFolderId = mailFolderCollectionPage.getCurrentPage().get(0).id;
IMessageCollectionPage messageCollectionPage = graphServiceClient.me().mailFolders(mailFolderId).messages().buildRequest().get();
assertNotNull(messageCollectionPage);
}
}
@Test
public void meMemberof() {
IDirectoryObjectCollectionWithReferencesPage page = graphServiceClient.me().memberOf().buildRequest().get();
assertNotNull(page);
}
@Test
public void getMeAndRetryOnThrottling() throws Exception {
ExecutorService exec = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors() * 2);
try {
for(Integer i = 0; i < 2000; i++) {
exec.submit(new Runnable() {
@Override
public void run() {
final IUserCollectionPage users = graphServiceClient.users().buildRequest().get();
assertNotNull(users);
final List<User> currentPage = users.getCurrentPage();
assertNotNull(currentPage);
assertNotEquals(0, currentPage.size());
}
});
}
} finally {
// Shut down before awaiting termination; otherwise awaitTermination just waits out the full timeout.
exec.shutdown();
exec.awaitTermination(5L, TimeUnit.MINUTES);
}
}
@Test
public void emptyPostContentType() {
final String contentTypeValue = "application/json";
final HeaderOption ctype = new HeaderOption("Content-Type", contentTypeValue);
final ArrayList<Option> options = new ArrayList<>();
options.add(ctype);
final Request request = graphServiceClient.me()
.revokeSignInSessions()
.buildRequest(options)
.withHttpMethod(HttpMethod.POST)
.getHttpRequest();
assertEquals(contentTypeValue, request.body().contentType().toString());
}
@Test
public void castTest() {
final IGroupCollectionPage groups = graphServiceClient.groups().buildRequest().top(1).get();
final Group group = groups.getCurrentPage().get(0);
final IUserCollectionPage usersPage = graphServiceClient
.groups(group.id)
.members()
.castToUser()
.buildRequest()
.get();
assertNotNull(usersPage);
final IDirectoryObjectCollectionWithReferencesPage testUserCollection = graphServiceClient
.groups(group.id)
.members()
.buildRequest()
.top(1)
.get();
final DirectoryObject testUser = testUserCollection.getCurrentPage().get(0);
final User user = graphServiceClient
.groups(group.id)
.members(testUser.id)
.castToUser()
.buildRequest()
.get();
assertNotNull(user);
}
}
|
package com.intellij.openapi.util;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.IconLoader.CachedImageIcon.HandleNotFound;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.reference.SoftReference;
import com.intellij.ui.RetrievableIcon;
import com.intellij.ui.icons.*;
import com.intellij.ui.scale.JBUIScale;
import com.intellij.ui.scale.ScaleContext;
import com.intellij.ui.scale.ScaleContextSupport;
import com.intellij.util.*;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.FixedHashMap;
import com.intellij.util.ui.ImageUtil;
import com.intellij.util.ui.JBImageIcon;
import com.intellij.util.ui.StartupUiUtil;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.*;
import javax.swing.*;
import java.awt.*;
import java.awt.image.BufferedImage;
import java.awt.image.ImageFilter;
import java.awt.image.RGBImageFilter;
import java.lang.reflect.Field;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Collections;
import java.util.Map;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Function;
import java.util.function.Supplier;
import static com.intellij.ui.paint.PaintUtil.RoundingMode.ROUND;
import static com.intellij.ui.scale.DerivedScaleType.DEV_SCALE;
import static com.intellij.ui.scale.DerivedScaleType.EFF_USR_SCALE;
import static com.intellij.ui.scale.ScaleType.OBJ_SCALE;
import static com.intellij.ui.scale.ScaleType.SYS_SCALE;
public final class IconLoader {
private static final Logger LOG = Logger.getInstance("#com.intellij.openapi.util.IconLoader");
private static final String LAF_PREFIX = "/com/intellij/ide/ui/laf/icons/";
private static final String ICON_CACHE_URL_KEY = "ICON_CACHE_URL_KEY";
// the key: Pair(ICON_CACHE_URL_KEY, url) or Pair(path, classLoader)
private static final ConcurrentMap<Pair<String, Object>, CachedImageIcon> ourIconsCache =
ContainerUtil.newConcurrentMap(100, 0.9f, 2);
/**
* This cache contains the mapping between icons and their disabled counterparts.
*/
private static final Map<Icon, Icon> ourIcon2DisabledIcon = ContainerUtil.createWeakMap(200);
private static volatile boolean STRICT_GLOBAL;
private static final ThreadLocal<Boolean> STRICT_LOCAL = new ThreadLocal<Boolean>() {
@Override
protected Boolean initialValue() {
return false;
}
@Override
public Boolean get() {
if (STRICT_GLOBAL) return true;
return super.get();
}
};
private static final AtomicReference<IconTransform> ourTransform = new AtomicReference<>(IconTransform.getDefault());
static {
installPathPatcher(new DeprecatedDuplicatesIconPathPatcher());
}
private static final ImageIcon EMPTY_ICON = new ImageIcon(UIUtil.createImage(1, 1, BufferedImage.TYPE_3BYTE_BGR)) {
@NonNls
public String toString() {
return "Empty icon " + super.toString();
}
};
private static boolean ourIsActivated;
private IconLoader() { }
public static <T, E extends Throwable> T performStrictly(ThrowableComputable<T, E> computable) throws E {
STRICT_LOCAL.set(true);
try {
return computable.compute();
} finally {
STRICT_LOCAL.set(false);
}
}
public static void setStrictGlobally(boolean strict) {
STRICT_GLOBAL = strict;
}
private static void updateTransform(@NotNull Function<? super IconTransform, IconTransform> updater) {
IconTransform prev, next;
do {
prev = ourTransform.get();
next = updater.apply(prev);
}
while (!ourTransform.compareAndSet(prev, next));
if (prev != next) {
ourIconsCache.clear();
ourIcon2DisabledIcon.clear();
//clears svg cache
ImageDescriptor.clearCache();
}
}
public static void installPathPatcher(@NotNull final IconPathPatcher patcher) {
updateTransform(transform -> transform.withPathPatcher(patcher));
}
public static void removePathPatcher(@NotNull final IconPathPatcher patcher) {
updateTransform(transform -> transform.withoutPathPatcher(patcher));
}
/**
* @deprecated use {@link JBImageIcon}
*/
@Deprecated
@NotNull
public static Icon getIcon(@NotNull final Image image) {
return new JBImageIcon(image);
}
public static void setUseDarkIcons(final boolean useDarkIcons) {
updateTransform(transform -> transform.withDark(useDarkIcons));
}
public static void setFilter(final ImageFilter filter) {
updateTransform(transform -> transform.withFilter(filter));
}
public static void clearCache() {
// Copy the transform to trigger update of cached icons
updateTransform(IconTransform::copy);
}
//TODO[kb] support iconsets
//public static Icon getIcon(@NotNull final String path, @NotNull final String darkVariantPath) {
// return new InvariantIcon(getIcon(path), getIcon(darkVariantPath));
@NotNull
public static Icon getIcon(@NonNls @NotNull final String path) {
Class callerClass = ReflectionUtil.getGrandCallerClass();
assert callerClass != null : path;
return getIcon(path, callerClass);
}
@Nullable
public static Icon getReflectiveIcon(@NotNull String path, ClassLoader classLoader) {
try {
@NonNls String packageName = path.startsWith("AllIcons.") ? "com.intellij.icons." : "icons.";
Class<?> aClass = Class.forName(packageName + path.substring(0, path.lastIndexOf('.')).replace('.', '$'), true, classLoader);
Field field = aClass.getField(path.substring(path.lastIndexOf('.') + 1));
field.setAccessible(true);
return (Icon)field.get(null);
}
catch (Exception e) {
return null;
}
}
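// For example, a reflective path such as "AllIcons.Actions.Execute" resolves against the class
// com.intellij.icons.AllIcons$Actions and its static field "Execute"; any other prefix is
// looked up under the "icons." package instead.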
/**
* Might return null if the icon was not found.
* Use only if you expect a null return value; otherwise see {@link IconLoader#getIcon(String)}
*/
@Nullable
public static Icon findIcon(@NonNls @NotNull String path) {
Class callerClass = ReflectionUtil.getGrandCallerClass();
if (callerClass == null) return null;
return findIcon(path, callerClass);
}
@Nullable
public static Icon findIcon(@NonNls @NotNull String path, boolean strict) {
Class callerClass = ReflectionUtil.getGrandCallerClass();
if (callerClass == null) return null;
return findIcon(path, callerClass, false, strict);
}
@NotNull
public static Icon getIcon(@NotNull String path, @NotNull final Class aClass) {
Icon icon = findIcon(path, aClass, aClass.getClassLoader(), HandleNotFound.strict(STRICT_LOCAL.get()), true);
if (icon == null) {
LOG.error("Icon cannot be found in '" + path + "', aClass='" + aClass + "'");
}
return icon; // [tav] todo: can't fix it
}
public static void activate() {
ourIsActivated = true;
}
@TestOnly
public static void deactivate() {
ourIsActivated = false;
}
private static boolean isLoaderDisabled() {
return !ourIsActivated;
}
@Nullable
public static Icon findLafIcon(@NotNull String key, @NotNull Class aClass, boolean strict) {
return findIcon(LAF_PREFIX + key + ".png", aClass, true, strict);
}
/**
* Might return null if the icon was not found.
* Use only if you expect a null return value; otherwise see {@link IconLoader#getIcon(String, Class)}
*/
@Nullable
public static Icon findIcon(@NotNull final String path, @NotNull final Class aClass) {
return findIcon(path, aClass, false);
}
@Nullable
public static Icon findIcon(@NotNull String path, @NotNull final Class aClass, boolean computeNow) {
return findIcon(path, aClass, computeNow, STRICT_LOCAL.get());
}
@Nullable
public static Icon findIcon(@NotNull String path, @NotNull Class aClass, @SuppressWarnings("unused") boolean computeNow, boolean strict) {
return findIcon(path, aClass, aClass.getClassLoader(), HandleNotFound.strict(strict), false);
}
private static boolean isReflectivePath(@NotNull String path) {
if (path.isEmpty() || path.charAt(0) == '/') {
return false;
}
int dotIndex = path.indexOf('.');
if (dotIndex < 0) {
return false;
}
int suffixLength = "Icons".length();
return path.regionMatches(dotIndex - suffixLength, "Icons", 0, suffixLength);
}
@Nullable
public static Icon findIcon(URL url) {
return findIcon(url, true);
}
@Nullable
public static Icon findIcon(URL url, boolean useCache) {
if (url == null) {
return null;
}
Pair<String, Object> key = Pair.create(ICON_CACHE_URL_KEY, url);
CachedImageIcon icon = ourIconsCache.get(key);
if (icon == null) {
icon = new CachedImageIcon(url, useCache);
if (useCache) {
icon = ConcurrencyUtil.cacheOrGet(ourIconsCache, key, icon);
}
}
return icon;
}
@Nullable
private static Icon findIcon(@NotNull String originalPath,
@Nullable Class clazz,
@NotNull ClassLoader classLoader,
HandleNotFound handleNotFound,
boolean deferUrlResolve) {
Pair<String, ClassLoader> patchedPath = ourTransform.get().patchPath(originalPath, classLoader);
String path = patchedPath.first;
if (patchedPath.second != null) {
classLoader = patchedPath.second;
}
if (isReflectivePath(path)) {
return getReflectiveIcon(path, classLoader);
}
Pair<String, Object> key = Pair.create(originalPath, classLoader);
CachedImageIcon cachedIcon = ourIconsCache.get(key);
if (cachedIcon == null) {
cachedIcon = CachedImageIcon.create(originalPath, path, classLoader, clazz, handleNotFound, deferUrlResolve);
if (cachedIcon == null) {
return null;
}
cachedIcon = ConcurrencyUtil.cacheOrGet(ourIconsCache, key, cachedIcon);
}
ScaleContext scaleContext = ScaleContext.create();
if (!cachedIcon.getScaleContext().equals(scaleContext)) {
// honor scale context as the cache doesn't do that
cachedIcon = cachedIcon.copy();
cachedIcon.updateScaleContext(scaleContext);
}
return cachedIcon;
}
@Nullable
public static Icon findIcon(@NotNull String path, @NotNull ClassLoader classLoader) {
return findIcon(path, null, classLoader, HandleNotFound.strict(false), false);
}
@Nullable
public static Image toImage(@NotNull Icon icon) {
return toImage(icon, null);
}
@Nullable
public static Image toImage(@NotNull Icon icon, @Nullable ScaleContext ctx) {
if (icon instanceof RetrievableIcon) {
icon = ((RetrievableIcon)icon).retrieveIcon();
}
if (icon instanceof CachedImageIcon) {
icon = ((CachedImageIcon)icon).getRealIcon(ctx);
}
if (icon == null) return null;
if (icon instanceof ImageIcon) {
return ((ImageIcon)icon).getImage();
}
else {
BufferedImage image;
if (GraphicsEnvironment.isHeadless()) { // for testing purpose
image = UIUtil.createImage(ctx, icon.getIconWidth(), icon.getIconHeight(), BufferedImage.TYPE_INT_ARGB, ROUND);
} else {
if (ctx == null) ctx = ScaleContext.create();
image = GraphicsEnvironment.getLocalGraphicsEnvironment()
.getDefaultScreenDevice().getDefaultConfiguration()
.createCompatibleImage(ROUND.round(ctx.apply(icon.getIconWidth(), DEV_SCALE)),
ROUND.round(ctx.apply(icon.getIconHeight(), DEV_SCALE)),
Transparency.TRANSLUCENT);
if (StartupUiUtil.isJreHiDPI(ctx)) {
image = (BufferedImage)ImageUtil.ensureHiDPI(image, ctx, icon.getIconWidth(), icon.getIconHeight());
}
}
Graphics2D g = image.createGraphics();
try {
icon.paintIcon(null, g, 0, 0);
} finally {
g.dispose();
}
return image;
}
}
@Contract("null, _, _->null; !null, _, _->!null")
public static Icon copy(@Nullable Icon icon, @Nullable Component ancestor, boolean deepCopy) {
if (icon == null) return null;
if (icon instanceof CopyableIcon) {
return deepCopy ? ((CopyableIcon)icon).deepCopy() : ((CopyableIcon)icon).copy();
}
BufferedImage image = UIUtil.createImage(ancestor, icon.getIconWidth(), icon.getIconHeight(), BufferedImage.TYPE_INT_ARGB);
Graphics2D g = image.createGraphics();
try {
icon.paintIcon(ancestor, g, 0, 0);
} finally {
g.dispose();
}
return new JBImageIcon(image);
}
@Nullable
private static ImageIcon checkIcon(final @Nullable Image image, @NotNull CachedImageIcon cii) {
if (image == null || image.getHeight(null) < 1) { // image wasn't loaded or broken
return null;
}
final ImageIcon icon = new JBImageIcon(image);
if (!isGoodSize(icon)) {
LOG.error("Invalid icon: " + cii); // # 22481
return EMPTY_ICON;
}
return icon;
}
public static boolean isGoodSize(@NotNull final Icon icon) {
return icon.getIconWidth() > 0 && icon.getIconHeight() > 0;
}
/**
* Gets (creates if necessary) disabled icon based on the passed one.
*
* @return {@code ImageIcon} constructed from disabled image of passed icon.
*/
@Nullable
public static Icon getDisabledIcon(Icon icon) {
if (!ourIsActivated) {
return icon;
}
if (icon instanceof LazyIcon) icon = ((LazyIcon)icon).getOrComputeIcon();
if (icon == null) return null;
Icon disabledIcon = ourIcon2DisabledIcon.get(icon);
if (disabledIcon == null) {
disabledIcon = filterIcon(icon, UIUtil::getGrayFilter/* returns laf-aware instance */, null); // [tav] todo: lack ancestor
ourIcon2DisabledIcon.put(icon, disabledIcon);
}
return disabledIcon;
}
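// Usage sketch (any Icon instance will do): the disabled variant is cached per source icon,
// so repeated calls with the same icon return the same instance.
//   Icon grayed = IconLoader.getDisabledIcon(someIcon);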
/**
* Creates new icon with the filter applied.
*/
@Nullable
public static Icon filterIcon(@NotNull Icon icon, @NotNull Supplier<? extends RGBImageFilter> filterSupplier, @Nullable Component ancestor) {
if (icon instanceof LazyIcon) icon = ((LazyIcon)icon).getOrComputeIcon();
if (icon == null) return null;
if (!isGoodSize(icon)) {
LOG.error(icon); // # 22481
return EMPTY_ICON;
}
if (icon instanceof CachedImageIcon) {
icon = ((CachedImageIcon)icon).createWithFilter(filterSupplier);
} else {
double scale;
ScaleContextSupport ctxSupport = getScaleContextSupport(icon);
if (ctxSupport != null) {
scale = ctxSupport.getScale(SYS_SCALE);
}
else {
scale = StartupUiUtil.isJreHiDPI() ? JBUIScale.sysScale(ancestor) : 1f;
}
@SuppressWarnings("UndesirableClassUsage")
BufferedImage image = new BufferedImage((int)(scale * icon.getIconWidth()), (int)(scale * icon.getIconHeight()), BufferedImage.TYPE_INT_ARGB);
final Graphics2D graphics = image.createGraphics();
graphics.setColor(UIUtil.TRANSPARENT_COLOR);
graphics.fillRect(0, 0, icon.getIconWidth(), icon.getIconHeight());
graphics.scale(scale, scale);
icon.paintIcon(LabelHolder.ourFakeComponent, graphics, 0, 0);
graphics.dispose();
Image img = ImageUtil.filter(image, filterSupplier.get());
if (StartupUiUtil.isJreHiDPI(ancestor)) img = RetinaImage.createFrom(img, scale, null);
icon = new JBImageIcon(img);
}
return icon;
}
@NotNull
public static Icon getTransparentIcon(@NotNull final Icon icon) {
return getTransparentIcon(icon, 0.5f);
}
@NotNull
public static Icon getTransparentIcon(@NotNull final Icon icon, final float alpha) {
return new RetrievableIcon() {
@Override
public Icon retrieveIcon() {
return icon;
}
@Override
public int getIconHeight() {
return icon.getIconHeight();
}
@Override
public int getIconWidth() {
return icon.getIconWidth();
}
@Override
public void paintIcon(final Component c, final Graphics g, final int x, final int y) {
final Graphics2D g2 = (Graphics2D)g;
final Composite saveComposite = g2.getComposite();
g2.setComposite(AlphaComposite.getInstance(AlphaComposite.SRC_ATOP, alpha));
icon.paintIcon(c, g2, x, y);
g2.setComposite(saveComposite);
}
};
}
/**
* Gets a snapshot of the icon, immune to changes made by these calls:
* {@link #setFilter(ImageFilter)}, {@link #setUseDarkIcons(boolean)}
*
* @param icon the source icon
* @return the icon snapshot
*/
@NotNull
public static Icon getIconSnapshot(@NotNull Icon icon) {
if (icon instanceof CachedImageIcon) {
return ((CachedImageIcon)icon).getRealIcon();
}
return icon;
}
/**
* For internal usage. Converts the icon to 1x scale when applicable.
*/
@ApiStatus.Internal
public static Icon getMenuBarIcon(Icon icon, boolean dark) {
if (icon instanceof RetrievableIcon) {
icon = ((RetrievableIcon)icon).retrieveIcon();
}
if (icon instanceof MenuBarIconProvider) {
return ((MenuBarIconProvider)icon).getMenuBarIcon(dark);
}
return icon;
}
/**
* Returns a copy of the provided {@code icon} with darkness set to {@code dark}.
* The method takes effect on a {@link CachedImageIcon} (or its wrapper) only.
*/
public static Icon getDarkIcon(Icon icon, boolean dark) {
if (icon instanceof RetrievableIcon) {
icon = getOrigin((RetrievableIcon)icon);
}
if (icon instanceof DarkIconProvider) {
return ((DarkIconProvider)icon).getDarkIcon(dark);
}
return icon;
}
@SuppressWarnings("UnnecessaryFullyQualifiedName")
public static final class CachedImageIcon extends com.intellij.ui.icons.LazyImageIcon {
@Nullable private final String myOriginalPath;
@NotNull private volatile MyUrlResolver myResolver;
@Nullable("when not overridden") private final Boolean myDarkOverridden;
@NotNull private volatile IconTransform myTransform;
private final boolean myUseCacheOnLoad;
@Nullable private final Supplier<? extends RGBImageFilter> myLocalFilterSupplier;
private final MyScaledIconsCache myScaledIconsCache = new MyScaledIconsCache();
public CachedImageIcon(@NotNull URL url) {
this(url, true);
}
CachedImageIcon(@Nullable URL url, boolean useCacheOnLoad) {
this(new MyUrlResolver(url, null), null, useCacheOnLoad);
}
private CachedImageIcon(@NotNull MyUrlResolver urlResolver, @Nullable String originalPath, boolean useCacheOnLoad)
{
this(originalPath, urlResolver, null, useCacheOnLoad, ourTransform.get(), null);
}
private CachedImageIcon(@Nullable String originalPath,
@NotNull MyUrlResolver resolver,
@Nullable Boolean darkOverridden,
boolean useCacheOnLoad,
@NotNull IconTransform transform,
@Nullable Supplier<? extends RGBImageFilter> localFilterSupplier)
{
myOriginalPath = originalPath;
myResolver = resolver;
myDarkOverridden = darkOverridden;
myUseCacheOnLoad = useCacheOnLoad;
myTransform = transform;
myLocalFilterSupplier = localFilterSupplier;
}
@Contract("_, _, _, _, _, true -> !null")
static CachedImageIcon create(@NotNull String originalPath,
@Nullable String pathToResolve,
@NotNull ClassLoader classLoader,
@Nullable Class clazz,
HandleNotFound handleNotFound,
boolean deferUrlResolve)
{
MyUrlResolver resolver = new MyUrlResolver(pathToResolve == null ? originalPath : pathToResolve, clazz, classLoader, handleNotFound);
CachedImageIcon icon = new CachedImageIcon(resolver, originalPath, true);
if (!deferUrlResolve && icon.getURL() == null) return null;
return icon;
}
@Nullable
public String getOriginalPath() {
return myOriginalPath;
}
@Override
@NotNull
protected ImageIcon getRealIcon(@Nullable ScaleContext ctx) {
if (!isValid()) {
if (isLoaderDisabled()) return EMPTY_ICON;
synchronized (myLock) {
if (!isValid()) {
myTransform = ourTransform.get();
myResolver.resolve();
myRealIcon = null;
myScaledIconsCache.clear();
if (myOriginalPath != null) {
myResolver = myResolver.patch(myOriginalPath, myTransform);
}
}
}
}
Object realIcon = myRealIcon;
synchronized (myLock) {
if (!updateScaleContext(ctx) && realIcon != null) {
// try returning the current icon as the context is up-to-date
ImageIcon icon = unwrapIcon(realIcon);
if (icon != null) return icon;
}
ImageIcon icon = myScaledIconsCache.getOrScaleIcon(1f);
if (icon != null) {
myRealIcon = icon.getIconWidth() < 50 && icon.getIconHeight() < 50 ? icon : new SoftReference<>(icon);
return icon;
}
}
return EMPTY_ICON;
}
private boolean isValid() {
return myTransform == ourTransform.get() && myResolver.isResolved();
}
@Override
public String toString() {
if (myResolver.isResolved()) {
URL url = myResolver.getURL();
if (url != null) return url.toString();
}
return myOriginalPath != null ? myOriginalPath : "unknown path";
}
@NotNull
@Override
public Icon scale(float scale) {
if (scale == 1f) return this;
getRealIcon(); // force state update & cache reset
Icon icon = myScaledIconsCache.getOrScaleIcon(scale);
if (icon != null) {
return icon;
}
return this;
}
@Override
public Icon getDarkIcon(boolean isDark) {
return new CachedImageIcon(myOriginalPath, myResolver, isDark, myUseCacheOnLoad, myTransform, myLocalFilterSupplier);
}
@Override
public Icon getMenuBarIcon(boolean isDark) {
Image img = loadFromUrl(ScaleContext.createIdentity(), isDark);
if (img != null) {
return new ImageIcon(img);
}
return this;
}
@NotNull
@Override
public CachedImageIcon copy() {
return new CachedImageIcon(myOriginalPath, myResolver, myDarkOverridden, myUseCacheOnLoad, myTransform, myLocalFilterSupplier);
}
@NotNull
private Icon createWithFilter(@NotNull Supplier<? extends RGBImageFilter> filterSupplier) {
return new CachedImageIcon(myOriginalPath, myResolver, myDarkOverridden, myUseCacheOnLoad, myTransform, filterSupplier);
}
private boolean isDark() {
return myDarkOverridden == null ? myTransform.isDark() : myDarkOverridden;
}
@Nullable
private ImageFilter[] getFilters() {
ImageFilter global = myTransform.getFilter();
ImageFilter local = myLocalFilterSupplier != null ? myLocalFilterSupplier.get() : null;
if (global != null && local != null) {
return new ImageFilter[] {global, local};
}
else if (global != null) {
return new ImageFilter[] {global};
}
else if (local != null) {
return new ImageFilter[] {local};
}
return null;
}
@Nullable
public URL getURL() {
return myResolver.getURL();
}
@Nullable
private Image loadFromUrl(@NotNull ScaleContext ctx, boolean dark) {
int flags = ImageLoader.FIND_SVG | ImageLoader.ALLOW_FLOAT_SCALING;
if (myUseCacheOnLoad) {
flags |= ImageLoader.USE_CACHE;
}
if (dark) {
flags |= ImageLoader.DARK;
}
String path = myResolver.myOverriddenPath;
Class aClass = myResolver.myClass;
if (aClass != null && path != null) {
return ImageLoader.loadFromUrl(path, aClass, flags, getFilters(), ctx);
}
URL url = getURL();
if (url == null) {
return null;
}
return ImageLoader.loadFromUrl(url, null, flags, getFilters(), ctx);
}
private final class MyScaledIconsCache {
private static final int SCALED_ICONS_CACHE_LIMIT = 5;
private final Map<Couple<Double>, SoftReference<ImageIcon>> scaledIconsCache = Collections.synchronizedMap(
new FixedHashMap<>(SCALED_ICONS_CACHE_LIMIT));
private Couple<Double> key(@NotNull ScaleContext ctx) {
return new Couple<>(ctx.getScale(EFF_USR_SCALE), ctx.getScale(SYS_SCALE));
}
/**
* Retrieves the original icon scaled by the provided scale factor.
*/
ImageIcon getOrScaleIcon(final float scale) {
ScaleContext ctx = getScaleContext();
if (scale != 1) {
ctx = ctx.copy();
ctx.setScale(OBJ_SCALE.of(scale));
}
ImageIcon icon = SoftReference.dereference(scaledIconsCache.get(key(ctx)));
if (icon != null) {
return icon;
}
Image image = loadFromUrl(ctx, isDark());
icon = checkIcon(image, CachedImageIcon.this);
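// Cache only reasonably small icons: the estimated pixel data (~4 bytes per pixel)
// must stay below the shared image cache limit.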
if (icon != null && 4L * icon.getIconWidth() * icon.getIconHeight() < ImageLoader.CACHED_IMAGE_MAX_SIZE) {
scaledIconsCache.put(key(ctx), new SoftReference<>(icon));
}
return icon;
}
public void clear() {
scaledIconsCache.clear();
}
}
enum HandleNotFound {
THROW_EXCEPTION {
@Override
void handle(String msg) {
throw new RuntimeException(msg);
}
},
LOG_ERROR {
@Override
void handle(String msg) {
LOG.error(msg);
}
},
IGNORE;
void handle(String msg) throws RuntimeException {}
static HandleNotFound strict(boolean strict) {
return strict ? THROW_EXCEPTION : IGNORE;
}
}
/**
* Used to defer URL resolution.
*/
private static final class MyUrlResolver {
@Nullable private final Class myClass;
@Nullable private final ClassLoader myClassLoader;
@Nullable private final String myOverriddenPath;
@NotNull private final HandleNotFound myHandleNotFound;
// Every myUrl write is performed before the isResolved write (see resolve())
// and every myUrl read is performed after the isResolved read (see getURL()), thus
// it is not necessary to declare myUrl as volatile: happens-before is established via isResolved.
@Nullable private URL myUrl;
private volatile boolean isResolved;
MyUrlResolver(@Nullable URL url, @Nullable ClassLoader classLoader) {
myClass = null;
myOverriddenPath = null;
myClassLoader = classLoader;
myUrl = url;
myHandleNotFound = HandleNotFound.IGNORE;
isResolved = true;
}
MyUrlResolver(@Nullable URL url, @NotNull String path, @Nullable ClassLoader classLoader) {
myClass = null;
myOverriddenPath = path;
myClassLoader = classLoader;
myUrl = url;
myHandleNotFound = HandleNotFound.IGNORE;
isResolved = true;
}
MyUrlResolver(@NotNull String path, @Nullable Class clazz, @Nullable ClassLoader classLoader, @NotNull HandleNotFound handleNotFound) {
myOverriddenPath = path;
myClass = clazz;
myClassLoader = classLoader;
myHandleNotFound = handleNotFound;
if (!Registry.is("ide.icons.deferUrlResolve")) resolve();
}
boolean isResolved() {
return isResolved;
}
/**
* Resolves the URL if it's not yet resolved.
*/
MyUrlResolver resolve() throws RuntimeException {
if (isResolved) return this;
try {
URL url = null;
String path = myOverriddenPath;
if (path != null) {
if (myClassLoader != null) {
path = StringUtil.trimStart(path, "/"); // Paths in ClassLoader getResource shouldn't start with "/"
url = findURL(path, myClassLoader::getResource);
}
if (url == null && myClass != null) {
// Some plugins use findIcon("icon.png", IconContainer.class)
url = findURL(path, myClass::getResource);
}
}
if (url == null) {
myHandleNotFound.handle("Can't find icon in '" + path + "' near " + myClassLoader);
}
myUrl = url;
} finally {
isResolved = true;
}
return this;
}
@Nullable
URL getURL() {
if (!isResolved()) {
return resolve().myUrl;
}
return myUrl;
}
MyUrlResolver patch(@NotNull String originalPath, @NotNull IconTransform transform) {
Pair<String, ClassLoader> patchedPath = transform.patchPath(originalPath, myClassLoader);
ClassLoader classLoader = patchedPath.second != null ? patchedPath.second : myClassLoader;
String path = patchedPath.first;
if (classLoader != null && path != null && path.startsWith("/")) {
return new MyUrlResolver(path.substring(1), null, classLoader, myHandleNotFound).resolve();
}
// This use case is for temp themes only. Here we want to immediately replace the existing icon with a local one
if (path != null && path.startsWith("file:/")) {
try {
return new MyUrlResolver(new URL(path), path.substring(1), classLoader).resolve();
} catch (MalformedURLException ignore) {}
}
return this;
}
@Nullable
@SuppressWarnings("DuplicateExpressions")
private static URL findURL(@NotNull String path, @NotNull Function<? super String, URL> urlProvider) {
URL url = urlProvider.apply(path);
if (url != null) return url;
// Find either PNG or SVG icon. The icon will then be wrapped into CachedImageIcon
// which will load proper icon version depending on the context - UI theme, DPI.
// SVG version, when present, has more priority than PNG.
// See for details: com.intellij.util.ImageLoader.ImageDescList#create
if (path.endsWith(".png")) {
path = path.substring(0, path.length() - 4) + ".svg";
}
else if (path.endsWith(".svg")) {
path = path.substring(0, path.length() - 4) + ".png";
}
else {
LOG.debug("unexpected path: ", path);
}
return urlProvider.apply(path);
}
}
}
public abstract static class LazyIcon extends ScaleContextSupport implements CopyableIcon, RetrievableIcon {
private boolean myWasComputed;
private volatile Icon myIcon;
private IconTransform myTransform = ourTransform.get();
@Override
public void paintIcon(Component c, Graphics g, int x, int y) {
if (updateScaleContext(ScaleContext.create((Graphics2D)g))) {
myIcon = null;
}
final Icon icon = getOrComputeIcon();
if (icon != null) {
icon.paintIcon(c, g, x, y);
}
}
@Override
public int getIconWidth() {
final Icon icon = getOrComputeIcon();
return icon != null ? icon.getIconWidth() : 0;
}
@Override
public int getIconHeight() {
final Icon icon = getOrComputeIcon();
return icon != null ? icon.getIconHeight() : 0;
}
protected final synchronized Icon getOrComputeIcon() {
IconTransform currentTransform = ourTransform.get();
if (!myWasComputed || myTransform != currentTransform || myIcon == null)
{
myTransform = currentTransform;
myWasComputed = true;
myIcon = compute();
}
return myIcon;
}
public final void load() {
getIconWidth();
}
protected abstract Icon compute();
@Nullable
@Override
public Icon retrieveIcon() {
return getOrComputeIcon();
}
@NotNull
@Override
public Icon copy() {
return IconLoader.copy(getOrComputeIcon(), null, false);
}
}
private static Icon getOrigin(RetrievableIcon icon) {
final int maxDeep = 10;
Icon origin = icon.retrieveIcon();
int level = 0;
while (origin instanceof RetrievableIcon && level < maxDeep) {
++level;
origin = ((RetrievableIcon)origin).retrieveIcon();
}
if (origin instanceof RetrievableIcon)
LOG.error("can't calculate origin icon (too deep in hierarchy), src: " + icon);
return origin;
}
/**
* Returns {@link ScaleContextSupport} which best represents this icon taking into account its compound structure,
* or null when not applicable.
*/
@Nullable
public static ScaleContextSupport getScaleContextSupport(@Nullable Icon icon) {
if (icon instanceof ScaleContextSupport) {
return (ScaleContextSupport)icon;
}
else if (icon instanceof RetrievableIcon) {
return getScaleContextSupport(((RetrievableIcon)icon).retrieveIcon());
}
else if (icon instanceof CompositeIcon) {
return getScaleContextSupport(((CompositeIcon)icon).getIcon(0));
}
return null;
}
private static class LabelHolder {
/**
* To get a disabled icon we paint the original into an image. Some icons require
* a non-null component to paint.
*/
private static final JComponent ourFakeComponent = new JLabel();
}
/**
* Immutable representation of a global transformation applied to all icons
*/
private static final class IconTransform {
private final boolean myDark;
private final @NotNull IconPathPatcher[] myPatchers;
private final @Nullable ImageFilter myFilter;
private IconTransform(boolean dark, @NotNull IconPathPatcher[] patchers, @Nullable ImageFilter filter) {
myDark = dark;
myPatchers = patchers;
myFilter = filter;
}
public boolean isDark() {
return myDark;
}
@Nullable
public ImageFilter getFilter() {
return myFilter;
}
public IconTransform withPathPatcher(IconPathPatcher patcher) {
return new IconTransform(myDark, ArrayUtil.append(myPatchers, patcher), myFilter);
}
public IconTransform withoutPathPatcher(IconPathPatcher patcher) {
IconPathPatcher[] newPatchers = ArrayUtil.remove(myPatchers, patcher);
return newPatchers == myPatchers ? this : new IconTransform(myDark, newPatchers, myFilter);
}
public IconTransform withFilter(ImageFilter filter) {
return filter == myFilter ? this : new IconTransform(myDark, myPatchers, filter);
}
public IconTransform withDark(boolean dark) {
return dark == myDark ? this : new IconTransform(dark, myPatchers, myFilter);
}
public Pair<String, ClassLoader> patchPath(@NotNull String path, ClassLoader classLoader) {
for (IconPathPatcher patcher : myPatchers) {
String newPath = patcher.patchPath(path, classLoader);
if (newPath == null) {
newPath = patcher.patchPath(path, null);
}
if (newPath != null) {
LOG.info("replace '" + path + "' with '" + newPath + "'");
ClassLoader contextClassLoader = patcher.getContextClassLoader(path, classLoader);
if (contextClassLoader == null) {
//noinspection deprecation
Class contextClass = patcher.getContextClass(path);
if (contextClass != null) {
contextClassLoader = contextClass.getClassLoader();
}
}
return Pair.create(newPath, contextClassLoader);
}
}
return Pair.create(path, null);
}
public IconTransform copy() {
return new IconTransform(myDark, myPatchers, myFilter);
}
public static IconTransform getDefault() {
return new IconTransform(UIUtil.isUnderDarcula(), new IconPathPatcher[0], null);
}
}
}
|
package com.s3auth.hosts;
import com.amazonaws.services.dynamodbv2.AmazonDynamoDB;
import com.amazonaws.services.dynamodbv2.model.AttributeValue;
import com.amazonaws.services.dynamodbv2.model.ScanRequest;
import com.amazonaws.services.dynamodbv2.model.ScanResult;
import com.jcabi.urn.URNMocker;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import org.hamcrest.MatcherAssert;
import org.hamcrest.Matchers;
import org.junit.Test;
import org.mockito.Mockito;
/**
* Test case for {@link DefaultDynamo}.
* @author Yegor Bugayenko (yegor@tpc2.com)
* @version $Id$
* @checkstyle ClassDataAbstractionCoupling (500 lines)
*/
public final class DefaultDynamoTest {
/**
* DefaultDynamo can load configuration.
* @throws Exception If there is some problem inside
*/
@Test
public void loadsDynamoConfiguration() throws Exception {
final Dynamo dynamo = new DefaultDynamo(
new Dynamo.Client() {
@Override
public AmazonDynamoDB get() {
return DefaultDynamoTest.this.amazon();
}
},
"table"
);
MatcherAssert.assertThat(
dynamo.load().size(),
Matchers.equalTo(dynamo.load().size())
);
final int size = dynamo.load().size();
dynamo.add(new URNMocker().mock(), new DomainMocker().mock());
MatcherAssert.assertThat(
dynamo.load().size(),
Matchers.not(Matchers.equalTo(size))
);
dynamo.close();
}
/**
* Create and return a random amazon client.
* @return The client
*/
private AmazonDynamoDB amazon() {
final List<Map<String, AttributeValue>> items =
new LinkedList<Map<String, AttributeValue>>();
final int total = Math.abs(new Random().nextInt(20));
for (int num = 0; num < total; ++num) {
items.add(this.item());
}
final AmazonDynamoDB aws =
Mockito.mock(AmazonDynamoDB.class);
Mockito.doReturn(new ScanResult().withItems(items))
.when(aws).scan(Mockito.any(ScanRequest.class));
return aws;
}
/**
* Create and return a random amazon item.
* @return The item
*/
private Map<String, AttributeValue> item() {
final ConcurrentMap<String, AttributeValue> item =
new ConcurrentHashMap<String, AttributeValue>(0);
item.put(
DefaultDynamo.USER,
new AttributeValue(new URNMocker().mock().toString())
);
item.put(
DefaultDynamo.NAME,
new AttributeValue(
String.format(
"google-%d.com",
Math.abs(new Random().nextInt())
)
)
);
item.put(DefaultDynamo.KEY, new AttributeValue("aaaaaaaaaaaaaaaa"));
item.put(
DefaultDynamo.SECRET,
new AttributeValue("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa")
);
item.put(DefaultDynamo.REGION, new AttributeValue("s3"));
return item;
}
}
|
// checkstyle: Checks Java source code for adherence to a set of rules.
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
package com.puppycrawl.tools.checkstyle.checks.javadoc;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import org.antlr.v4.runtime.ANTLRInputStream;
import org.antlr.v4.runtime.BailErrorStrategy;
import org.antlr.v4.runtime.BaseErrorListener;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.ParserRuleContext;
import org.antlr.v4.runtime.RecognitionException;
import org.antlr.v4.runtime.Recognizer;
import org.antlr.v4.runtime.Token;
import org.antlr.v4.runtime.misc.ParseCancellationException;
import org.antlr.v4.runtime.tree.ParseTree;
import org.antlr.v4.runtime.tree.TerminalNode;
import com.google.common.base.CaseFormat;
import com.google.common.primitives.Ints;
import com.puppycrawl.tools.checkstyle.api.Check;
import com.puppycrawl.tools.checkstyle.api.DetailAST;
import com.puppycrawl.tools.checkstyle.api.DetailNode;
import com.puppycrawl.tools.checkstyle.api.JavadocTokenTypes;
import com.puppycrawl.tools.checkstyle.api.TokenTypes;
import com.puppycrawl.tools.checkstyle.grammars.javadoc.JavadocLexer;
import com.puppycrawl.tools.checkstyle.grammars.javadoc.JavadocParser;
/**
* Base class for Checks that process Javadoc comments.
* @author Baratali Izmailov
*/
public abstract class AbstractJavadocCheck extends Check {
/**
* Error message key for common javadoc errors.
*/
public static final String PARSE_ERROR_MESSAGE_KEY = "javadoc.parse.error";
/**
* Unrecognized error from antlr parser.
*/
public static final String UNRECOGNIZED_ANTLR_ERROR_MESSAGE_KEY =
"javadoc.unrecognized.antlr.error";
/**
* key is "line:column"
* value is DetailNode tree
*/
private static final Map<String, ParseStatus> TREE_CACHE = new HashMap<>();
/**
* Custom error listener.
*/
private final DescriptiveErrorListener errorListener =
new DescriptiveErrorListener();
/**
* DetailAST node of the Javadoc comment under consideration, which is just a block comment
* in the Java syntax tree.
*/
private DetailAST blockCommentAst;
/**
* Returns the default token types a check is interested in.
* @return the default token types
* @see JavadocTokenTypes
*/
public abstract int[] getDefaultJavadocTokens();
/**
* Called before starting to process a tree.
* @param rootAst
* the root of the tree
*/
public void beginJavadocTree(DetailNode rootAst) {
// No code by default; override in subclasses when needed
}
/**
* Called after finished processing a tree.
* @param rootAst
* the root of the tree
*/
public void finishJavadocTree(DetailNode rootAst) {
// No code by default; override in subclasses when needed
}
/**
* Called to process a Javadoc token.
* @param ast
* the token to process
*/
public abstract void visitJavadocToken(DetailNode ast);
/**
* Called after all the child nodes have been processed.
* @param ast
* the token leaving
*/
public void leaveJavadocToken(DetailNode ast) {
// No code by default; override in subclasses when needed
}
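/*
 * A minimal subclass sketch (hypothetical check and message key):
 *
 *   public class SingleParagraphCheck extends AbstractJavadocCheck {
 *       @Override
 *       public int[] getDefaultJavadocTokens() {
 *           return new int[] {JavadocTokenTypes.PARAGRAPH};
 *       }
 *       @Override
 *       public void visitJavadocToken(DetailNode ast) {
 *           log(ast.getLineNumber(), "my.message.key");
 *       }
 *   }
 */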
/**
* Defined final to not allow JavadocChecks to change default tokens.
* @return default tokens
*/
@Override
public final int[] getDefaultTokens() {
return new int[] {TokenTypes.BLOCK_COMMENT_BEGIN };
}
/**
* Defined final because all JavadocChecks require comment nodes.
* @return true
*/
@Override
public final boolean isCommentNodesRequired() {
return true;
}
@Override
public final void beginTree(DetailAST rootAST) {
TREE_CACHE.clear();
}
@Override
public final void finishTree(DetailAST rootAST) {
TREE_CACHE.clear();
}
@Override
public final void visitToken(DetailAST blockCommentAst) {
if (JavadocUtils.isJavadocComment(blockCommentAst)) {
this.blockCommentAst = blockCommentAst;
final String treeCacheKey = blockCommentAst.getLineNo() + ":"
+ blockCommentAst.getColumnNo();
ParseStatus ps;
if (TREE_CACHE.containsKey(treeCacheKey)) {
ps = TREE_CACHE.get(treeCacheKey);
}
else {
ps = parseJavadocAsDetailNode(blockCommentAst);
TREE_CACHE.put(treeCacheKey, ps);
}
if (ps.getParseErrorMessage() == null) {
processTree(ps.getTree());
}
else {
final ParseErrorMessage parseErrorMessage = ps.getParseErrorMessage();
log(parseErrorMessage.getLineNumber(),
parseErrorMessage.getMessageKey(),
parseErrorMessage.getMessageArguments());
}
}
}
protected DetailAST getBlockCommentAst() {
return blockCommentAst;
}
/**
* Parses Javadoc comment as DetailNode tree.
* @param javadocCommentAst
* DetailAST of Javadoc comment
* @return DetailNode tree of Javadoc comment
*/
private ParseStatus parseJavadocAsDetailNode(DetailAST javadocCommentAst) {
final String javadocComment = JavadocUtils.getJavadocCommentContent(javadocCommentAst);
// Log messages should have line number in scope of file,
// not in scope of Javadoc comment.
// Offset is line number of beginning of Javadoc comment.
errorListener.setOffset(javadocCommentAst.getLineNo() - 1);
final ParseStatus result = new ParseStatus();
ParseTree parseTree = null;
ParseErrorMessage parseErrorMessage = null;
try {
parseTree = parseJavadocAsParseTree(javadocComment);
}
catch (ParseCancellationException e) {
// If a syntax error occurs, the message is printed by the error listener
// and the parser throws this runtime exception to stop parsing.
// Just stop processing the current Javadoc comment.
parseErrorMessage = errorListener.getErrorMessage();
// There are cases when the ANTLR error listener does not handle a syntax error
if (parseErrorMessage == null) {
parseErrorMessage = new ParseErrorMessage(javadocCommentAst.getLineNo(),
UNRECOGNIZED_ANTLR_ERROR_MESSAGE_KEY,
javadocCommentAst.getColumnNo(), e.getMessage());
}
}
if (parseErrorMessage == null) {
final DetailNode tree = convertParseTree2DetailNode(parseTree);
result.setTree(tree);
}
else {
result.setParseErrorMessage(parseErrorMessage);
}
return result;
}
/**
* Converts ParseTree (that is generated by ANTLRv4) to DetailNode tree.
*
* @param parseTreeNode root node of ParseTree
* @return root of DetailNode tree
*/
private DetailNode convertParseTree2DetailNode(ParseTree parseTreeNode) {
final JavadocNodeImpl rootJavadocNode = createJavadocNode(parseTreeNode, null, -1);
int childCount = parseTreeNode.getChildCount();
JavadocNodeImpl[] children = new JavadocNodeImpl[childCount];
for (int i = 0; i < childCount; i++) {
final JavadocNodeImpl child = createJavadocNode(parseTreeNode.getChild(i),
rootJavadocNode, i);
children[i] = child;
}
rootJavadocNode.setChildren(children);
JavadocNodeImpl currentJavadocParent = rootJavadocNode;
ParseTree parseTreeParent = parseTreeNode;
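// Walk the Javadoc tree and the ParseTree in lock-step: for every child of the current node,
// build that child's own children from the corresponding ParseTree node, then descend into the
// first child; when a node has no children, advance to the next sibling, climbing up through
// parents until one is found.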
while (currentJavadocParent != null) {
children = (JavadocNodeImpl[]) currentJavadocParent.getChildren();
childCount = children.length;
for (int i = 0; i < childCount; i++) {
final JavadocNodeImpl currentJavadocNode = children[i];
final ParseTree currentParseTreeNodeChild = parseTreeParent.getChild(i);
final JavadocNodeImpl[] subChildren =
new JavadocNodeImpl[currentJavadocNode.getChildren().length];
for (int j = 0; j < subChildren.length; j++) {
final JavadocNodeImpl child =
createJavadocNode(currentParseTreeNodeChild.getChild(j),
currentJavadocNode, j);
subChildren[j] = child;
}
currentJavadocNode.setChildren(subChildren);
}
if (childCount > 0) {
currentJavadocParent = children[0];
parseTreeParent = parseTreeParent.getChild(0);
}
else {
JavadocNodeImpl nextJavadocSibling = (JavadocNodeImpl) JavadocUtils
.getNextSibling(currentJavadocParent);
ParseTree nextParseTreeSibling = getNextSibling(parseTreeParent);
if (nextJavadocSibling == null) {
JavadocNodeImpl tempJavadocParent =
(JavadocNodeImpl) currentJavadocParent.getParent();
ParseTree tempParseTreeParent = parseTreeParent.getParent();
while (nextJavadocSibling == null && tempJavadocParent != null) {
nextJavadocSibling = (JavadocNodeImpl) JavadocUtils
.getNextSibling(tempJavadocParent);
nextParseTreeSibling = getNextSibling(tempParseTreeParent);
tempJavadocParent = (JavadocNodeImpl) tempJavadocParent.getParent();
tempParseTreeParent = tempParseTreeParent.getParent();
}
}
currentJavadocParent = nextJavadocSibling;
parseTreeParent = nextParseTreeSibling;
}
}
return rootJavadocNode;
}
/**
* Creates a JavadocNodeImpl node based on a ParseTree node.
*
* @param parseTree ParseTree node
* @param parent DetailNode that will be the parent of the new node
* @param index child index of the new node
* @return JavadocNodeImpl node based on the ParseTree node.
*/
private JavadocNodeImpl createJavadocNode(ParseTree parseTree, DetailNode parent, int index) {
final JavadocNodeImpl node = new JavadocNodeImpl();
node.setText(parseTree.getText());
node.setColumnNumber(getColumn(parseTree));
node.setLineNumber(getLine(parseTree) + blockCommentAst.getLineNo());
node.setIndex(index);
node.setType(getTokenType(parseTree));
node.setParent(parent);
node.setChildren(new JavadocNodeImpl[parseTree.getChildCount()]);
return node;
}
/**
* Gets next sibling of ParseTree node.
* @param node ParseTree node
* @return next sibling of ParseTree node.
*/
private static ParseTree getNextSibling(ParseTree node) {
if (node.getParent() == null) {
return null;
}
final ParseTree parent = node.getParent();
final int childCount = parent.getChildCount();
int i = 0;
while (true) {
final ParseTree currentNode = parent.getChild(i);
if (currentNode.equals(node)) {
if (i == childCount - 1) {
return null;
}
return parent.getChild(i + 1);
}
i++;
}
}
/**
* Gets token type of ParseTree node from JavadocTokenTypes class.
* @param node ParseTree node.
* @return token type from JavadocTokenTypes
*/
private static int getTokenType(ParseTree node) {
int tokenType;
if (node.getChildCount() == 0) {
tokenType = ((TerminalNode) node).getSymbol().getType();
}
else {
final String className = getNodeClassNameWithoutContext(node);
final String typeName =
CaseFormat.UPPER_CAMEL.to(CaseFormat.UPPER_UNDERSCORE, className);
tokenType = JavadocUtils.getTokenId(typeName);
}
return tokenType;
}
/**
* Gets class name of ParseTree node and removes 'Context' postfix at the
* end.
* @param node
* ParseTree node.
* @return class name without 'Context'
*/
private static String getNodeClassNameWithoutContext(ParseTree node) {
final String className = node.getClass().getSimpleName();
// remove 'Context' at the end
final int contextLength = 7;
return className.substring(0, className.length() - contextLength);
}
/**
* Gets line number from ParseTree node.
* @param tree
* ParseTree node
* @return line number
*/
private static int getLine(ParseTree tree) {
if (tree instanceof TerminalNode) {
return ((TerminalNode) tree).getSymbol().getLine() - 1;
}
else {
final ParserRuleContext rule = (ParserRuleContext) tree;
return rule.start.getLine() - 1;
}
}
/**
* Gets column number from ParseTree node.
* @param tree
* ParseTree node
* @return column number
*/
private static int getColumn(ParseTree tree) {
if (tree instanceof TerminalNode) {
return ((TerminalNode) tree).getSymbol().getCharPositionInLine();
}
else {
final ParserRuleContext rule = (ParserRuleContext) tree;
return rule.start.getCharPositionInLine();
}
}
/**
* Parses block comment content as javadoc comment.
* @param blockComment
* block comment content.
* @return parse tree
*/
private ParseTree parseJavadocAsParseTree(String blockComment) {
final ANTLRInputStream input = new ANTLRInputStream(blockComment);
final JavadocLexer lexer = new JavadocLexer(input);
// remove default error listeners
lexer.removeErrorListeners();
// add custom error listener that logs parsing errors
lexer.addErrorListener(errorListener);
final CommonTokenStream tokens = new CommonTokenStream(lexer);
final JavadocParser parser = new JavadocParser(tokens);
// remove default error listeners
parser.removeErrorListeners();
// add custom error listener that logs syntax errors
parser.addErrorListener(errorListener);
// This strategy stops parsing when a parser error occurs.
// By default ANTLR uses an error recovery strategy, which is slow and not useful here.
parser.setErrorHandler(new BailErrorStrategy());
return parser.javadoc();
}
/**
* Processes JavadocAST tree notifying Check.
* @param root
* root of JavadocAST tree.
*/
private void processTree(DetailNode root) {
beginJavadocTree(root);
walk(root);
finishJavadocTree(root);
}
/**
* Processes a node calling Check at interested nodes.
* @param root
* the root of tree for process
*/
private void walk(DetailNode root) {
final int[] defaultTokenTypes = getDefaultJavadocTokens();
DetailNode curNode = root;
while (curNode != null) {
final boolean waitsFor = Ints.contains(defaultTokenTypes, curNode.getType());
if (waitsFor) {
visitJavadocToken(curNode);
}
DetailNode toVisit = JavadocUtils.getFirstChild(curNode);
while (curNode != null && toVisit == null) {
if (waitsFor) {
leaveJavadocToken(curNode);
}
toVisit = JavadocUtils.getNextSibling(curNode);
if (toVisit == null) {
curNode = curNode.getParent();
}
}
curNode = toVisit;
}
}
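    /*
     * Traversal sketch (illustrative, not taken from the original sources): for a
     * Javadoc tree
     *
     *   root
     *   |-- a
     *   |   `-- b
     *   `-- c
     *
     * walk() fires visit(root), visit(a), visit(b), leave(b), leave(a), visit(c),
     * leave(c), leave(root), assuming every node type is returned by
     * getDefaultJavadocTokens(). The inner while loop is what climbs back up the
     * tree and emits the leave calls once a subtree has no more children.
     */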
/**
* Custom error listener for JavadocParser that prints user readable errors.
*/
static class DescriptiveErrorListener extends BaseErrorListener {
/**
         * Message key of the error message. A missing closing HTML tag breaks the
         * structure of the parse tree, so the parser stops parsing and generates
         * this error message. The case is special because the parser reports it as
         * {@code "no viable alternative at input 'b \n *\n'"}, which does not make
         * it clear that the problem is a missing closing HTML tag.
*/
static final String JAVADOC_MISSED_HTML_CLOSE = "javadoc.missed.html.close";
/**
* Message key of error message.
*/
static final String JAVADOC_WRONG_SINGLETON_TAG =
"javadoc.wrong.singleton.html.tag";
/**
* Parse error while rule recognition.
*/
private static final String JAVADOC_PARSE_RULE_ERROR = "javadoc.parse.rule.error";
/**
         * Offset is the line number of the beginning of the Javadoc comment. Log
         * messages should report line numbers relative to the file, not to the
         * Javadoc comment.
*/
private int offset;
/**
* Error message that appeared while parsing.
*/
private ParseErrorMessage errorMessage;
public ParseErrorMessage getErrorMessage() {
return errorMessage;
}
/**
         * Sets the offset. Offset is the line number of the beginning of the
         * Javadoc comment. Log messages should report line numbers relative to the
         * file, not to the Javadoc comment.
* @param offset
* offset line number
*/
public void setOffset(int offset) {
this.offset = offset;
}
/**
         * Logs parser errors in the Checkstyle manner. The parser can generate
         * several error messages; the missing closing HTML tag is handled
         * specially, because the parser reports it as
         * {@code "no viable alternative at input 'b \n *\n'"} and that message does
         * not make the real cause obvious. All other errors are logged simply as
         * "Parse Error...".
* <p>
* {@inheritDoc}
*/
@Override
public void syntaxError(
Recognizer<?, ?> recognizer, Object offendingSymbol,
int line, int charPositionInLine,
String msg, RecognitionException ex) {
final int lineNumber = offset + line;
final Token token = (Token) offendingSymbol;
if (JAVADOC_MISSED_HTML_CLOSE.equals(msg)) {
errorMessage = new ParseErrorMessage(lineNumber,
JAVADOC_MISSED_HTML_CLOSE, charPositionInLine, token.getText());
throw new ParseCancellationException();
}
else if (JAVADOC_WRONG_SINGLETON_TAG.equals(msg)) {
errorMessage = new ParseErrorMessage(lineNumber,
JAVADOC_WRONG_SINGLETON_TAG, charPositionInLine, token.getText());
throw new ParseCancellationException();
}
else {
final int ruleIndex = ex.getCtx().getRuleIndex();
final String ruleName = recognizer.getRuleNames()[ruleIndex];
final String upperCaseRuleName = CaseFormat.UPPER_CAMEL.to(
CaseFormat.UPPER_UNDERSCORE, ruleName);
errorMessage = new ParseErrorMessage(lineNumber,
JAVADOC_PARSE_RULE_ERROR, charPositionInLine, msg, upperCaseRuleName);
}
}
}
/**
* Contains result of parsing javadoc comment: DetailNode tree and parse
* error message.
*/
private static class ParseStatus {
/**
         * DetailNode tree; null if parsing fails.
*/
private DetailNode tree;
/**
         * Parse error message; null if parsing succeeds.
*/
private ParseErrorMessage parseErrorMessage;
public DetailNode getTree() {
return tree;
}
public void setTree(DetailNode tree) {
this.tree = tree;
}
public ParseErrorMessage getParseErrorMessage() {
return parseErrorMessage;
}
public void setParseErrorMessage(ParseErrorMessage parseErrorMessage) {
this.parseErrorMessage = parseErrorMessage;
}
}
/**
* Contains information about parse error message.
*/
private static class ParseErrorMessage {
/**
* Line number where parse error occurred.
*/
private final int lineNumber;
/**
* Key for error message.
*/
private final String messageKey;
/**
* Error message arguments.
*/
private final Object[] messageArguments;
/**
* Initializes parse error message.
*
* @param lineNumber line number
* @param messageKey message key
* @param messageArguments message arguments
*/
public ParseErrorMessage(int lineNumber, String messageKey, Object ... messageArguments) {
this.lineNumber = lineNumber;
this.messageKey = messageKey;
this.messageArguments = messageArguments.clone();
}
public int getLineNumber() {
return lineNumber;
}
public String getMessageKey() {
return messageKey;
}
public Object[] getMessageArguments() {
return messageArguments.clone();
}
}
}
|
package web.component.impl.awselb.model;
import com.amazonaws.services.elasticloadbalancing.model.Instance;
import com.amazonaws.services.elasticloadbalancing.model.InstanceState;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import web.component.api.model.BackendInstance;
import web.component.api.model.BackendInstanceState;
import web.component.api.model.LoadBalancer;
/**
*
* @author Hiroshi
*/
public class BackendInstanceImpl implements BackendInstance{
private static final Map<String,BackendInstance> existBackendInstances = new HashMap<>();
private final Instance elbInstance = new Instance();
private BackendInstanceImpl(String id){
elbInstance.setInstanceId(id);
}
private BackendInstanceImpl(Builder builder){
elbInstance.setInstanceId(builder.id);
}
@Override
public LoadBalancer getLoadBalancer() {
throw new UnsupportedOperationException("Not yet supported.");
}
@Override
public List<LoadBalancer> getLoadBalancers(){
throw new UnsupportedOperationException("Not yet supported.");
}
@Override
public String getId() {
return elbInstance.getInstanceId();
}
@Override
public void registerWith(LoadBalancer newLb) {
if(newLb == null || !(newLb instanceof LoadBalancerImpl))
throw new IllegalArgumentException("Invalid load balancer specified.");
newLb.registerInstance(this);
}
@Override
public void deregisterFrom(LoadBalancer lb) {
if(lb == null || !(lb instanceof LoadBalancerImpl))
throw new IllegalArgumentException("Invalid load balancer specified.");
lb.deregisterInstance(this);
}
@Override
public boolean equals(Object toCompare){
if(toCompare instanceof BackendInstanceImpl)
return this.getId().equals(((BackendInstanceImpl)toCompare).getId());
return false;
}
@Override
public int hashCode() {
        //hash code derived from the instance id, so it stays consistent with equals()
        return 31 * getId().hashCode();
}
@Override
public BackendInstanceState getState(){
throw new UnsupportedOperationException("Not yet supported.");
}
@Override
public BackendInstanceState getStateFromLB(LoadBalancer lb){
return lb.getInstanceState(this);
}
@Override
public String toString(){
return "{BackendInstanceID: " + getId() + "}";
}
public static class State implements BackendInstanceState{
private final InstanceState elbInstanceState;
private State(InstanceState elbInstanceState){
this.elbInstanceState = elbInstanceState;
}
public static State create(InstanceState elbInstanceState){
return new State(elbInstanceState);
}
@Override
public String getDescription() {
return elbInstanceState.getDescription();
}
@Override
public String getId() {
return elbInstanceState.getInstanceId();
}
@Override
public String getReasonCode() {
return elbInstanceState.getReasonCode();
}
@Override
public String getState() {
return elbInstanceState.getState();
}
@Override
public String toString(){
return getDescription();
}
}
public static class Builder {
private String id;
public Builder id(String id){
this.id = id;
return this;
}
public BackendInstance build(){
if(existBackendInstances.get(id) == null)
existBackendInstances.put(id, new BackendInstanceImpl(this));
return existBackendInstances.get(id);
}
}
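    /*
     * Usage sketch (illustrative; the instance id below is an assumption):
     *
     *   BackendInstance instance = new BackendInstanceImpl.Builder()
     *                                      .id("i-0123456789abcdef0")
     *                                      .build();
     *
     * build() caches instances by id in existBackendInstances, so building twice
     * with the same id returns the same object.
     */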
}
|
package org.jkiss.dbeaver;
import org.eclipse.core.runtime.ILog;
import org.eclipse.core.runtime.Status;
import org.jkiss.dbeaver.bundle.ModelActivator;
import org.jkiss.dbeaver.utils.GeneralUtils;
import java.io.PrintStream;
import java.text.SimpleDateFormat;
import java.util.Date;
/**
* Log
*/
public class Log
{
private static String corePluginID = ModelPreferences.PLUGIN_ID;
private static SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS");
private static final ILog eclipseLog = ModelActivator.getInstance().getLog();
private final String name;
public static Log getLog(Class<?> forClass) {
return new Log(forClass.getName());
}
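    /*
     * Usage sketch (illustrative; the class name and messages are assumptions):
     *
     *   private static final Log log = Log.getLog(MyComponent.class);
     *   ...
     *   log.debug("Opening connection");
     *   log.error("Connection failed", e);
     *
     * debug() only writes to System.err and the activator's debug writer, while
     * info(), warn() and error() additionally forward a Status to the Eclipse
     * platform log; trace() is a no-op.
     */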
private Log(String name)
{
this.name = name;
}
public String getName()
{
return name;
}
public boolean isDebugEnabled()
{
return true;
}
public boolean isErrorEnabled()
{
return true;
}
public boolean isFatalEnabled()
{
return true;
}
public boolean isInfoEnabled()
{
return true;
}
public boolean isTraceEnabled()
{
return false;
}
public boolean isWarnEnabled()
{
return true;
}
public void trace(Object message)
{
}
public void trace(Object message, Throwable t)
{
}
public void debug(Object message)
{
if (message instanceof Throwable) {
debug(message.toString(), (Throwable)message);
} else {
debug(message, null);
}
}
public void debug(Object message, Throwable t)
{
ModelActivator activator = ModelActivator.getInstance();
debugMessage(message, t, System.err);
if (activator != null) {
debugMessage(message, t, activator.getDebugWriter());
}
}
private static void debugMessage(Object message, Throwable t, PrintStream debugWriter) {
synchronized (Log.class) {
debugWriter.print(sdf.format(new Date()) + " - ");
if (t == null) {
debugWriter.println(message);
} else {
t.printStackTrace(debugWriter);
}
debugWriter.flush();
}
}
public void info(Object message)
{
if (message instanceof Throwable) {
info(message.toString(), (Throwable)message);
return;
}
debugMessage(message, null, System.err);
eclipseLog.log(new Status(
Status.INFO,
corePluginID,
message == null ? null : message.toString()));
}
public void info(Object message, Throwable t)
{
writeExceptionStatus(Status.INFO, message, t);
}
public void warn(Object message)
{
if (message instanceof Throwable) {
warn(message.toString(), (Throwable)message);
return;
}
debugMessage(message, null, System.err);
ModelActivator.getInstance().getLog().log(new Status(
Status.WARNING,
corePluginID,
message == null ? null : message.toString()));
}
public void warn(Object message, Throwable t)
{
writeExceptionStatus(Status.WARNING, message, t);
}
public void error(Object message)
{
if (message instanceof Throwable) {
error(message.toString(), (Throwable)message);
return;
}
debugMessage(message, null, System.err);
ModelActivator.getInstance().getLog().log(new Status(
Status.ERROR,
corePluginID,
message == null ? null : message.toString()));
}
public void error(Object message, Throwable t)
{
writeExceptionStatus(Status.ERROR, message, t);
}
public void fatal(Object message)
{
error(message);
}
public void fatal(Object message, Throwable t)
{
error(message, t);
}
private static void writeExceptionStatus(int severity, Object message, Throwable t)
{
debugMessage(message, t, System.err);
ModelActivator activator = ModelActivator.getInstance();
if (activator != null) {
// Activator may be null in some unclear circumstances (like shutdown is in progress)
ILog log = activator.getLog();
if (log != null) {
if (t == null) {
log.log(new Status(
severity,
corePluginID,
message == null ? null : message.toString()));
} else {
if (message == null) {
log.log(GeneralUtils.makeExceptionStatus(severity, t));
} else {
log.log(GeneralUtils.makeExceptionStatus(severity, message.toString(), t));
}
}
}
}
}
}
|
package com.sybit.education.taschengeldboerse.controller;
import com.sybit.education.taschengeldboerse.domain.Schueler;
import com.sybit.education.taschengeldboerse.domain.User;
import com.sybit.education.taschengeldboerse.model.SchuelerForm;
import com.sybit.education.taschengeldboerse.service.SchuelerService;
import com.sybit.education.taschengeldboerse.service.UserService;
import org.hibernate.exception.ConstraintViolationException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.ModelAttribute;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.servlet.ModelAndView;
import javax.servlet.http.HttpServletRequest;
/**
 * Handles student (Schueler) registration and profile requests.
*/
@Controller
public class SchuelerController {
@Autowired
private UserService userService;
@Autowired
private SchuelerService schuelerService;
@RequestMapping(value = "/registrieren/schueler", method = RequestMethod.GET)
public ModelAndView registrierenFormular(final HttpServletRequest request) {
ModelAndView modelAndView = new ModelAndView();
modelAndView.addObject("schuelerForm", new SchuelerForm());
modelAndView.setViewName("registrieren-schueler");
return modelAndView;
}
@RequestMapping(value = "/registrieren/schueler", method = RequestMethod.POST)
public ModelAndView saveForm(@ModelAttribute("schueler") SchuelerForm schuelerForm) {
ModelAndView modelAndView = new ModelAndView();
try {
User user = new User();
user.setEmail(schuelerForm.getEmail());
user.setPassword(schuelerForm.getPassword());
user.setAuthority("ROLE_SCHUELER");
user.setEnabled(true);
userService.addUser(user);
Schueler schueler = new Schueler();
schueler.setAnrede(schuelerForm.getAnrede());
schueler.setName(schuelerForm.getName());
schueler.setVorname(schuelerForm.getVorname());
schueler.setEmail(schuelerForm.getEmail());
schueler.setGeburtsdatum(schuelerForm.getDOBDay() + "." + schuelerForm.getDOBMonth() + "." + schuelerForm.getDOBYear());
schueler.setPlz(schuelerForm.getPlz());
schueler.setWohnort(schuelerForm.getWohnort());
userService.saveSchueler(schueler);
modelAndView.setViewName("job-liste");
} catch (IllegalArgumentException e) {
modelAndView.addObject("addEmailFail", true);
modelAndView.addObject("emailMessage", e.getMessage());
modelAndView.setViewName("registrieren-schueler");
} catch (ConstraintViolationException e) {
modelAndView.addObject("schueler", schuelerForm);
modelAndView.addObject("addFail", true);
modelAndView.setViewName("registrieren-schueler");
}
return modelAndView;
}
@RequestMapping(value = "schueler/profil", method = RequestMethod.GET)
public ModelAndView schuelerProfil(@RequestParam("id") Integer schuelerId) {
ModelAndView modelAndView = new ModelAndView("schueler-detail");
modelAndView.addObject("schueler", schuelerService.findSchuelerById(schuelerId));
return modelAndView;
}
@RequestMapping(value = "schueler/profil/nav", method = RequestMethod.GET)
public ModelAndView schuelerProfil(@RequestParam("username") String email) {
ModelAndView modelAndView = new ModelAndView("schueler-detail");
modelAndView.addObject("schueler", userService.getSchuelerByEmail(email));
return modelAndView;
}
}
|
package de.uni_potsdam.hpi.bpt.bp2014.jcore;
import de.uni_potsdam.hpi.bpt.bp2014.database.DbWebServiceTask;
import org.apache.log4j.Logger;
import org.json.JSONArray;
import org.json.JSONObject;
import javax.ws.rs.client.*;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.util.LinkedList;
/**
* This is the execution behavior for webservice tasks
*/
public class WebServiceTaskExecutionBehavior extends TaskExecutionBehavior {
static Logger log = Logger.getLogger(WebServiceTaskExecutionBehavior.class.getName());
/**
* DB Connection class.
*/
DbWebServiceTask dbWebServiceTask = new DbWebServiceTask();
/**
* Initializes the webservice task.
*
* @param activityInstance_id The id of the webservice task.
* @param scenarioInstance The instance of the ScenarioInstance.
* @param controlNodeInstance The instance of the ControlNodeInstance (ActivityInstance).
*/
public WebServiceTaskExecutionBehavior(int activityInstance_id, ScenarioInstance scenarioInstance, ControlNodeInstance controlNodeInstance) {
super(activityInstance_id, scenarioInstance, controlNodeInstance);
}
@Override
public void execute() {
String link = dbWebServiceTask.getLinkForControlNode(controlNodeInstance.getControlNode_id());
for (DataAttributeInstance dataAttributeInstance : scenarioInstance.getDataAttributeInstances().values()) {
link = link.replace(
"#" + (dataAttributeInstance.getDataObjectInstance()).getName()
+ "." + dataAttributeInstance.getName(), dataAttributeInstance.getValue().toString());
}
Client client = ClientBuilder.newClient();
String[] url = link.split("\\?");
WebTarget webResource = client.target(url[0]);
if (url.length > 1) {
String[] params = url[1].split("&");
for (String param : params) {
String[] values = param.split("=");
webResource = webResource.queryParam(values[0], values[1]);
}
}
Invocation.Builder invocationBuilder = webResource.request(MediaType.APPLICATION_JSON);
Response response;
try {
switch (dbWebServiceTask.getMethod(controlNodeInstance.getControlNode_id())) {
case "POST":
String post = dbWebServiceTask.getPOST(controlNodeInstance.getControlNode_id());
for (DataAttributeInstance dataAttributeInstance : scenarioInstance.getDataAttributeInstances().values()) {
post = post.replace(
"#" + (dataAttributeInstance.getDataObjectInstance()).getName()
+ "." + dataAttributeInstance.getName(), dataAttributeInstance.getValue().toString());
}
response = invocationBuilder.post(Entity.json(post));
break;
case "PUT":
post = dbWebServiceTask.getPOST(controlNodeInstance.getControlNode_id());
for (DataAttributeInstance dataAttributeInstance : scenarioInstance.getDataAttributeInstances().values()) {
post = post.replace(
"#" + (dataAttributeInstance.getDataObjectInstance()).getName()
+ "." + dataAttributeInstance.getName(), dataAttributeInstance.getValue().toString());
}
response = invocationBuilder.put(Entity.json(post));
break;
default:
response = invocationBuilder.get();
}
if (response.getStatus() >= 200 && response.getStatus() <= 226) {
this.writeDataAttributes(response.readEntity(String.class));
}
} catch (Exception e) {
            log.error("Error while calling the web service URL", e);
}
this.setCanTerminate(true);
}
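    /*
     * Substitution sketch (illustrative; the URL, data object and attribute names
     * are assumptions): a link stored as
     *
     *   http://example.org/weather?city=#Location.city
     *
     * combined with a data attribute instance Location.city = "Berlin" is rewritten
     * by the replace loop above to
     *
     *   http://example.org/weather?city=Berlin
     *
     * before the query string is split off and attached to the WebTarget.
     */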
/**
* Sets the specific data attribute values to the content from the request.
*
* @param content from GET Request.
*/
private void writeDataAttributes(String content) {
LinkedList<Integer> dataAttributeIds = dbWebServiceTask.getAttributeIdsForControlNode(controlNodeInstance.getControlNode_id());
for (int dataAttributeId : dataAttributeIds) {
LinkedList<String> keys = dbWebServiceTask.getKeys(controlNodeInstance.getControlNode_id(), dataAttributeId);
JSONObject jsonContent = new JSONObject(content);
JSONArray jsonArray = null;
boolean isJSONArray = false;
int i;
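            // Walk the JSON document key by key: each key is first tried as an object
            // member, then as an array index; the nested catch blocks fall through
            // until one interpretation succeeds. The last key is resolved further
            // below when the value is written into the matching data attribute.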
for (i = 0; i < keys.size() - 1; i++) {
try {
jsonContent = jsonContent.getJSONObject(keys.get(i));
jsonArray = null;
isJSONArray = false;
} catch (Exception e1) {
try {
jsonContent = jsonArray.getJSONObject(new Integer(keys.get(i)));
jsonArray = null;
isJSONArray = false;
} catch (Exception e2) {
try {
jsonArray = jsonContent.getJSONArray(keys.get(i));
jsonContent = null;
isJSONArray = true;
} catch (Exception e3) {
jsonArray = jsonArray.getJSONArray(new Integer(keys.get(i)));
jsonContent = null;
isJSONArray = true;
}
}
}
}
for (DataAttributeInstance dataAttributeInstance : scenarioInstance.getDataAttributeInstances().values()) {
if (dataAttributeInstance.getDataAttribute_id() == dataAttributeId) {
if (isJSONArray) {
dataAttributeInstance.setValue(jsonArray.get(new Integer(keys.get(i))));
} else {
dataAttributeInstance.setValue(jsonContent.get(keys.get(i)));
}
}
}
}
}
}
|
package com.swandiggy.poe4j.ggpkg;
import com.swandiggy.poe4j.Poe4jException;
import com.swandiggy.poe4j.ggpkg.factory.RecordFactory;
import com.swandiggy.poe4j.ggpkg.record.DirectoryRecord;
import com.swandiggy.poe4j.ggpkg.record.FileRecord;
import com.swandiggy.poe4j.ggpkg.record.Record;
import com.swandiggy.poe4j.util.aspect.MonitorRuntime;
import com.swandiggy.poe4j.util.collection.Node;
import com.swandiggy.poe4j.util.io.BinaryReader;
import com.swandiggy.poe4j.util.io.RafBinaryReader;
import lombok.Setter;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Lazy;
import org.springframework.stereotype.Service;
import org.springframework.util.Assert;
import java.io.File;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
/**
* Factory for {@link Ggpk}.
*
* @author Jacob Swanson
* @since 8/31/2015
*/
@Slf4j
public class GgpkFactory {
@Setter
private RecordFactory[] recordFactories;
public GgpkFactory() {
}
public GgpkFactory(RecordFactory[] recordFactories) {
Assert.notEmpty(recordFactories);
this.recordFactories = recordFactories;
}
/**
* Parse a GGPKG's record structure.
*
* @param ggpkgFile Content.ggpk
* @return Ggpk record structure
*/
@MonitorRuntime("Loaded records in %f seconds")
public Ggpk load(File ggpkgFile) {
Assert.notNull(ggpkgFile);
Assert.isTrue(ggpkgFile.exists(), "ggpkgFile did not exist");
log.info("Loading '" + ggpkgFile + "'");
Map<Long, Record> records = new HashMap<>();
// Extract records
try (BinaryReader br = new RafBinaryReader(ggpkgFile, "r")) {
while (br.getPosition() < br.length()) {
long recordOffset = br.getPosition();
int recordLength = br.readInt();
String tag = br.readString(4);
Optional<RecordFactory> factory = Arrays.stream(recordFactories)
.filter(recordFactory -> recordFactory.supports(tag))
.findAny();
if (!factory.isPresent()) {
throw new RuntimeException("No factory for tag: '" + tag + "'");
}
records.put(recordOffset, factory.get().read(br, recordOffset, recordLength));
}
} catch (IOException e) {
throw new Poe4jException("Could not close reader", e);
}
// Create all of the nodes for the directory structure
Map<Long, Node<Record>> nodes = new HashMap<>();
records.values().stream()
.filter(record -> record instanceof FileRecord || record instanceof DirectoryRecord)
.map(Node::new)
.forEach(recordNode -> nodes.put(recordNode.getData().getRecordStart(), recordNode));
// Connect all of the nodes together
nodes.values().stream()
.filter(recordNode -> recordNode.getData() instanceof DirectoryRecord)
.forEach(recordNode -> {
for (DirectoryRecord.DirectoryEntry directoryEntry : ((DirectoryRecord) recordNode.getData()).getEntries()) {
Node<Record> childNode = nodes.get(directoryEntry.getRecordStart());
recordNode.addChild(childNode);
childNode.setParent(recordNode);
}
});
return new Ggpk(records, ggpkgFile, nodes);
}
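    /*
     * Usage sketch (illustrative; the factory array and file path are assumptions):
     *
     *   GgpkFactory factory = new GgpkFactory(recordFactories);
     *   Ggpk ggpk = factory.load(new File("C:/Games/Path of Exile/Content.ggpk"));
     *
     * Every record in the pack is read sequentially first, then the file and
     * directory records are wired into a Node tree mirroring the directory
     * structure inside the pack.
     */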
}
|
package net.iponweb.disthene.reader.graphite.functions;
import net.iponweb.disthene.reader.beans.TimeSeries;
import net.iponweb.disthene.reader.exceptions.EvaluationException;
import net.iponweb.disthene.reader.exceptions.InvalidArgumentException;
import net.iponweb.disthene.reader.exceptions.MultipleDivisorsException;
import net.iponweb.disthene.reader.exceptions.TimeSeriesNotAlignedException;
import net.iponweb.disthene.reader.graphite.Target;
import net.iponweb.disthene.reader.graphite.evaluation.TargetEvaluator;
import net.iponweb.disthene.reader.utils.TimeSeriesUtils;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
/**
* @author Andrei Ivanov
*/
public class DivideSeriesFunction extends DistheneFunction {
public DivideSeriesFunction(String text) {
super(text, "divideSeries");
}
@Override
public List<TimeSeries> evaluate(TargetEvaluator evaluator) throws EvaluationException {
List<TimeSeries> dividends = new ArrayList<>();
dividends.addAll(evaluator.eval((Target) arguments.get(0)));
if (dividends.size() == 0) return Collections.emptyList();
List<TimeSeries> divisors = evaluator.eval((Target) arguments.get(1));
if (divisors.size() == 0) return Collections.emptyList();
if (divisors.size() != 1) throw new MultipleDivisorsException();
TimeSeries divisor = divisors.get(0);
List<TimeSeries> tmp = new ArrayList<>();
tmp.addAll(dividends);
tmp.add(divisor);
if (!TimeSeriesUtils.checkAlignment(tmp)) {
throw new TimeSeriesNotAlignedException();
}
List<TimeSeries> result = new ArrayList<>();
int length = divisor.getValues().length;
long from = divisor.getFrom();
long to = divisor.getTo();
int step = divisor.getStep();
for (TimeSeries ts : dividends) {
Double[] values = new Double[length];
TimeSeries resultTimeSeries = new TimeSeries(getText(), from, to, step);
for (int i = 0; i < length; i++) {
if (divisor.getValues()[i] == null || ts.getValues()[i] == null || divisor.getValues()[i] == 0) {
values[i] = null;
} else {
values[i] = ts.getValues()[i] / divisor.getValues()[i];
}
}
resultTimeSeries.setValues(values);
result.add(resultTimeSeries);
}
return result;
}
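    /*
     * Worked example (values are illustrative): with a dividend series
     * [10, 20, null, 30] and a divisor series [2, 0, 5, 10] on the same time grid,
     * the loop above yields [5.0, null, null, 3.0]; a null or zero divisor value
     * always produces null rather than an error.
     */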
@Override
public void checkArguments() throws InvalidArgumentException {
        if (arguments.size() != 2) throw new InvalidArgumentException("divideSeries: number of arguments is " + arguments.size() + ". Must be 2.");
for(Object argument : arguments) {
if (!(argument instanceof Target)) throw new InvalidArgumentException("divideSeries: argument is " + argument.getClass().getName() + ". Must be series");
}
}
}
|
package mll.service;
import static org.junit.Assert.assertEquals;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.junit.Test;
import mll.beans.PlaylistReference;
public class PlaylistReferenceServiceTest {
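    // Note: every test below deliberately swallows exceptions in an empty catch
    // block, so the assertions only take effect when the underlying service and
    // database are reachable; otherwise the test passes vacuously.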
@Test
public void testGetAllPlaylistsForUserId1()
{
try
{
PlaylistReferenceService service = new PlaylistReferenceService();
assertEquals(true, service.getAllPlaylistsForUser(-1) == null);
}
catch (Exception e)
{
}
}
@SuppressWarnings("unchecked")
@Test
public void testConvertToJson1()
{
try
{
PlaylistReferenceService service = new PlaylistReferenceService();
List<PlaylistReference> playlists = new ArrayList<PlaylistReference>();
PlaylistReference playlistReference = new PlaylistReference();
playlistReference.setId(1);
playlistReference.setPlaylistName("Test");
playlistReference.setUserId(1);
playlistReference.setCreationDate(new Date());
playlistReference.setIsShared(false);
playlists.add(playlistReference);
JSONArray jsonArray = new JSONArray();
JSONObject object = new JSONObject();
object.put("id", 1);
object.put("playlistName", "Test");
object.put("userId", 1);
jsonArray.add(object);
int count = service.convertToJson(playlists).size();
assertEquals(true, count==jsonArray.size());
}
catch(Exception e)
{
}
}
@Test
public void testSetPlaylistToGlobal1()
{
try
{
int userId = 0;
int playlistId = 0;
assertEquals(true, new PlaylistReferenceService().setPlaylistToGlobal(userId, playlistId) != null);
} catch (Exception e)
{
}
}
@Test
public void testSetPlaylistToGlobal2()
{
try
{
int userId = 0;
int playlistId = 10;
assertEquals(true, new PlaylistReferenceService().setPlaylistToGlobal(userId, playlistId) != null);
} catch (Exception e)
{
}
}
@Test
public void testAddPlaylistForUser2()
{
try
{
int userId = 1;
String playlistName = null;
assertEquals(true, new PlaylistReferenceService().addPlaylistForUser(userId, playlistName) != true);
} catch (Exception e)
{
}
}
@Test
public void testGetSharedPlaylist1()
{
try
{
assertEquals(true, new PlaylistReferenceService().getSharedPlaylists() != null);
}
catch(Exception e)
{
}
}
@Test
public void testDeletePlaylistForUser1()
{
try
{
int playlistId = 100000;
int userId = 100000;
assertEquals(false, new PlaylistReferenceService().deletePlaylistForUser(playlistId, userId));
}
catch(Exception e)
{
}
}
@Test
public void testDeletePlaylistForUser2()
{
try
{
int playlistId = -1;
int userId = -1;
assertEquals(false, new PlaylistReferenceService().deletePlaylistForUser(playlistId, userId));
}
catch(Exception e)
{
}
}
}
|
package com.francescocervone.rxdrive;
import android.app.Activity;
import android.content.ContentResolver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentSender;
import android.net.Uri;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.util.Log;
import android.webkit.MimeTypeMap;
import com.google.android.gms.common.ConnectionResult;
import com.google.android.gms.common.GoogleApiAvailability;
import com.google.android.gms.common.api.GoogleApiClient;
import com.google.android.gms.common.api.Status;
import com.google.android.gms.drive.Drive;
import com.google.android.gms.drive.DriveApi;
import com.google.android.gms.drive.DriveContents;
import com.google.android.gms.drive.DriveFile;
import com.google.android.gms.drive.DriveFolder;
import com.google.android.gms.drive.DriveId;
import com.google.android.gms.drive.DriveResource;
import com.google.android.gms.drive.Metadata;
import com.google.android.gms.drive.MetadataBuffer;
import com.google.android.gms.drive.MetadataChangeSet;
import com.google.android.gms.drive.query.Query;
import org.apache.commons.io.IOUtils;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import rx.Observable;
import rx.Subscriber;
import rx.functions.Func0;
import rx.subjects.PublishSubject;
public class RxDrive {
private static final int RESOLVE_CONNECTION_REQUEST_CODE = 1;
private static final int NO_RESOLUTION_REQUEST_CODE = 0;
private PublishSubject<ConnectionState> mConnectionStatePublishSubject = PublishSubject.create();
private GoogleApiClient mClient;
private GoogleApiClient.ConnectionCallbacks mConnectionCallbacks = new GoogleApiClient.ConnectionCallbacks() {
@Override
public void onConnected(@Nullable Bundle bundle) {
mConnectionStatePublishSubject.onNext(ConnectionState.connected(bundle));
}
@Override
public void onConnectionSuspended(int cause) {
mConnectionStatePublishSubject.onNext(ConnectionState.suspended(cause));
}
};
private GoogleApiClient.OnConnectionFailedListener mConnectionFailedListener = new GoogleApiClient.OnConnectionFailedListener() {
@Override
public void onConnectionFailed(@NonNull ConnectionResult connectionResult) {
mConnectionStatePublishSubject.onNext(ConnectionState.failed(connectionResult));
}
};
/**
* @param builder is a GoogleApiClient builder for your application
*/
public RxDrive(GoogleApiClient.Builder builder) {
mClient = builder.addApi(Drive.API)
.addConnectionCallbacks(mConnectionCallbacks)
.addOnConnectionFailedListener(mConnectionFailedListener)
.build();
}
public RxDrive(Context context) {
this(new GoogleApiClient.Builder(context).addScope(Drive.SCOPE_FILE));
}
/**
     * Creates an Observable that emits the connection state changes of the GoogleApiClient
*
* @return the Observable for connection state changes
*/
public Observable<ConnectionState> connectionObservable() {
return mConnectionStatePublishSubject.asObservable();
}
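    /*
     * Usage sketch (illustrative; TAG and the lambda syntax are assumptions):
     *
     *   rxDrive.connectionObservable()
     *           .subscribe(state -> Log.d(TAG, "Drive connection state: " + state));
     *   rxDrive.connect();
     *
     * Subscribing before connect() ensures that the connected/suspended/failed
     * events published by the callbacks above are not missed, since a
     * PublishSubject only delivers events to already-subscribed observers.
     */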
/**
* Establishes a connection with the GoogleApiClient created before
*/
public void connect() {
mClient.connect();
}
/**
* Disconnects from GoogleApiClient
*/
public void disconnect() {
mClient.disconnect();
}
/**
* Check if the GoogleApiClient is connected
*
* @return true if GoogleApiClient is connected, false otherwise
*/
public boolean isConnected() {
return mClient.isConnected();
}
/**
* @return the root folder of Google Drive
*/
public DriveFolder getRootFolder() {
return Drive.DriveApi.getRootFolder(mClient);
}
/**
* @return the app folder on Google Drive
*/
public DriveFolder getAppFolder() {
return Drive.DriveApi.getAppFolder(mClient);
}
/**
* Fetches a driveId
*
* @param s the string of the driveId
* @return an Observable with the driveId if exists
*/
public Observable<DriveId> fetchDriveId(final String s) {
return Observable.defer(new Func0<Observable<DriveId>>() {
@Override
public Observable<DriveId> call() {
DriveApi.DriveIdResult driveIdResult = Drive.DriveApi.fetchDriveId(mClient, s)
.await();
if (driveIdResult.getStatus().isSuccess()) {
return Observable.just(driveIdResult.getDriveId());
} else {
return Observable.error(new RxDriveException(driveIdResult.getStatus()));
}
}
});
}
/**
     * Lists the children of a folder
     *
     * @param driveFolder the folder whose children will be listed
     * @return an Observable with the list of the children's DriveIds
*/
public Observable<List<DriveId>> listChildren(final DriveFolder driveFolder) {
return Observable.defer(new Func0<Observable<List<DriveId>>>() {
@Override
public Observable<List<DriveId>> call() {
List<DriveId> list = new ArrayList<>();
DriveApi.MetadataBufferResult result = driveFolder.listChildren(mClient).await();
if (result.getStatus().isSuccess()) {
MetadataBuffer buffer = result.getMetadataBuffer();
for (Metadata m : buffer) {
list.add(m.getDriveId());
}
buffer.release();
return Observable.just(list);
} else {
return Observable.error(new RxDriveException(result.getStatus()));
}
}
});
}
/**
* Lists the parents of a Drive resource
*
     * @param driveResource the resource whose parents will be listed
     * @return an Observable with the list of the parents' DriveIds
*/
public Observable<List<DriveId>> listParents(final DriveResource driveResource) {
return Observable.defer(new Func0<Observable<List<DriveId>>>() {
@Override
public Observable<List<DriveId>> call() {
List<DriveId> list = new ArrayList<>();
DriveApi.MetadataBufferResult result = driveResource.listParents(mClient).await();
if (result.getStatus().isSuccess()) {
MetadataBuffer buffer = result.getMetadataBuffer();
for (Metadata m : buffer) {
list.add(m.getDriveId());
}
buffer.release();
return Observable.just(list);
} else {
return Observable.error(new RxDriveException(result.getStatus()));
}
}
});
}
/**
* Sets the parents of a resource
*
* @param driveResource the resource where to set the parents
* @param parents a set of drive id that will be the parents of the resource
* @return true if the operation succeeds
*/
public Observable<Boolean> setParents(final DriveResource driveResource, final Set<DriveId> parents) {
return Observable.defer(new Func0<Observable<Boolean>>() {
@Override
public Observable<Boolean> call() {
Status status = driveResource.setParents(mClient, parents).await();
if (status.isSuccess()) {
return Observable.just(true);
} else {
return Observable.error(new RxDriveException(status));
}
}
});
}
/**
     * Executes a Query on Google Drive
     *
     * @param query the query you want to submit
     * @return an Observable with the list of the matching DriveIds
*/
public Observable<List<DriveId>> query(final Query query) {
return Observable.defer(new Func0<Observable<List<DriveId>>>() {
@Override
public Observable<List<DriveId>> call() {
List<DriveId> list = new ArrayList<>();
DriveApi.MetadataBufferResult result = Drive.DriveApi
.query(mClient, query)
.await();
if (result.getStatus().isSuccess()) {
MetadataBuffer buffer = result.getMetadataBuffer();
for (Metadata metadata : buffer) {
list.add(metadata.getDriveId());
}
buffer.release();
return Observable.just(list);
} else {
return Observable.error(new RxDriveException(result.getStatus()));
}
}
});
}
/**
     * Lists the children of a folder matching a query
     *
     * @param driveFolder the folder whose children will be queried
     * @param query Drive query
     * @return an Observable with the list of the matching children's DriveIds
*/
public Observable<List<DriveId>> queryChildren(final DriveFolder driveFolder, final Query query) {
return Observable.defer(new Func0<Observable<List<DriveId>>>() {
@Override
public Observable<List<DriveId>> call() {
List<DriveId> list = new ArrayList<>();
DriveApi.MetadataBufferResult result = driveFolder
.queryChildren(mClient, query)
.await();
if (result.getStatus().isSuccess()) {
MetadataBuffer buffer = result.getMetadataBuffer();
for (Metadata metadata : buffer) {
list.add(metadata.getDriveId());
}
buffer.release();
return Observable.just(list);
} else {
return Observable.error(new RxDriveException(result.getStatus()));
}
}
});
}
/**
* Creates a file on Drive
*
* @param folder the folder where to create the new file
* @param file is the file that will be uploaded
* @return an Observable with the new DriveId
*/
public Observable<DriveId> createFile(DriveFolder folder, final File file) {
return createFile(folder, file, file.getName());
}
/**
* Creates a file on Drive
*
* @param folder the folder where to create the new file
* @param file is the file that will be uploaded
* @param title is the title that you want for the new file
* @return an Observable with the new DriveId
*/
public Observable<DriveId> createFile(DriveFolder folder, File file, String title) {
return createFile(folder, file, title, MimeTypeMap.getFileExtensionFromUrl(file.getPath()));
}
/**
* Creates a file on Drive
*
* @param folder the folder where to create the new file
* @param file is the file that will be uploaded
* @param title is the title that you want for the new file
* @param mimeType is the mimeType of the file
* @return an Observable with the new DriveId
*/
public Observable<DriveId> createFile(DriveFolder folder, File file, String title, String mimeType) {
return createFile(folder, Uri.fromFile(file), title, mimeType);
}
/**
* Creates a file on Drive
*
* @param folder the folder where to create the new file
* @param uri is the Uri of a file that will be uploaded
* @return an Observable with the new DriveId
*/
public Observable<DriveId> createFile(DriveFolder folder, final Uri uri) {
return createFile(folder, uri, uri.getLastPathSegment());
}
/**
* Creates a file on Drive
*
* @param folder the folder where to create the new file
* @param uri is the Uri of a file that will be uploaded
* @param title is the title that you want for the new file
* @return an Observable with the new DriveId
*/
public Observable<DriveId> createFile(DriveFolder folder, final Uri uri, String title) {
return createFile(folder, uri, title, getContentResolver().getType(uri));
}
/**
* Creates a file on Drive
*
* @param folder the folder where to create the new file
* @param uri is the Uri of a file that will be uploaded
* @param title is the title that you want for the new file
* @param mimeType is the mimeType of the file
* @return an Observable with the new DriveId
*/
public Observable<DriveId> createFile(DriveFolder folder, final Uri uri, String title, String mimeType) {
try {
return createFile(
folder,
getContentResolver()
.openInputStream(uri),
title,
mimeType);
} catch (FileNotFoundException e) {
return Observable.error(e);
}
}
/**
* Creates a file on Drive
*
* @param folder the folder where to create the new file
* @param inputStream is the InputStream that will be uploaded
* @return an Observable with the new DriveId
*/
public Observable<DriveId> createFile(DriveFolder folder, final InputStream inputStream) {
return createFile(folder, inputStream, String.valueOf(System.currentTimeMillis()));
}
/**
* Creates a file on Drive
*
* @param folder the folder where to create the new file
* @param inputStream is the InputStream that will be uploaded
* @param title is the title that you want for the new file
* @return an Observable with the new DriveId
*/
public Observable<DriveId> createFile(DriveFolder folder, final InputStream inputStream, String title) {
return createFile(folder, inputStream, title, null);
}
/**
* Creates a file on Drive
*
* @param folder the folder where to create the new file
* @param inputStream is the InputStream that will be uploaded
* @param title is the title that you want for the new file
* @param mimeType is the mimeType of the file
* @return an Observable with the new DriveId
*/
public Observable<DriveId> createFile(
final DriveFolder folder,
final InputStream inputStream,
final String title,
final String mimeType) {
return Observable.defer(new Func0<Observable<DriveId>>() {
@Override
public Observable<DriveId> call() {
try {
DriveContents driveContents = Drive.DriveApi.newDriveContents(mClient)
.await()
.getDriveContents();
IOUtils.copy(
inputStream,
driveContents.getOutputStream());
DriveFolder.DriveFileResult result = folder
.createFile(
mClient,
new MetadataChangeSet.Builder()
.setTitle(title)
.setMimeType(mimeType)
.build(),
driveContents)
.await();
if (result.getStatus().isSuccess()) {
return Observable.just(result.getDriveFile().getDriveId());
} else {
return Observable.error(new RxDriveException(result.getStatus()));
}
} catch (IOException e) {
e.printStackTrace();
return Observable.error(e);
}
}
});
}
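    /*
     * Usage sketch (illustrative; the file path, TAG and the lambda syntax are
     * assumptions):
     *
     *   rxDrive.createFile(rxDrive.getRootFolder(), new File("/sdcard/backup.db"))
     *           .subscribeOn(Schedulers.io())
     *           .subscribe(
     *                   driveId -> Log.d(TAG, "Created " + driveId),
     *                   error -> Log.e(TAG, "Upload failed", error));
     *
     * Every overload above blocks on await(), so subscribing on a background
     * Scheduler keeps the Drive round trip off the main thread.
     */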
/**
* Updates a file on Drive
*
* @param driveFile drive file
* @param file the content to write
* @return an Observable with the new DriveId
*/
public Observable<DriveFile> updateFileContent(final DriveFile driveFile, File file) {
return updateFileContent(driveFile, Uri.fromFile(file));
}
/**
* Updates a file on Drive
*
* @param driveFile drive file
* @param uri the content to write
* @return an Observable with the new DriveId
*/
public Observable<DriveFile> updateFileContent(final DriveFile driveFile, Uri uri) {
try {
return updateFileContent(driveFile, getContentResolver().openInputStream(uri));
} catch (FileNotFoundException e) {
return Observable.error(e);
}
}
/**
* Updates a file on Drive
*
* @param driveFile drive file
* @param content the content to write
* @return an Observable with the DriveId
*/
public Observable<DriveFile> updateFileContent(final DriveFile driveFile, final InputStream content) {
return Observable.defer(new Func0<Observable<DriveFile>>() {
@Override
public Observable<DriveFile> call() {
DriveApi.DriveContentsResult driveContentsResult = driveFile
.open(mClient, DriveFile.MODE_WRITE_ONLY, null)
.await();
DriveContents driveContents = driveContentsResult.getDriveContents();
try {
IOUtils.copy(content, driveContents.getOutputStream());
Status status = driveContents.commit(mClient, null).await();
if (status.isSuccess()) {
return Observable.just(driveFile);
} else {
return Observable.error(new RxDriveException(status));
}
} catch (IOException e) {
e.printStackTrace();
return Observable.error(e);
}
}
});
}
/**
* Creates a new folder
*
* @param folder where to create the new folder
* @param title the title of the new folder
* @return an observable with the new DriveFolder object
*/
public Observable<DriveFolder> createFolder(final DriveFolder folder, final String title) {
return Observable.defer(new Func0<Observable<DriveFolder>>() {
@Override
public Observable<DriveFolder> call() {
MetadataChangeSet metadataChangeSet = new MetadataChangeSet.Builder()
.setTitle(title)
.build();
DriveFolder.DriveFolderResult result = folder.createFolder(
mClient,
metadataChangeSet)
.await();
if (result.getStatus().isSuccess()) {
return Observable.just(result.getDriveFolder());
} else {
return Observable.error(new RxDriveException(result.getStatus()));
}
}
});
}
/**
* Removes a resource from Drive
*
* @param driveResource the resource that will be removed from Drive
* @return an Observable with `true` if the resource is removed
*/
public Observable<Boolean> delete(final DriveResource driveResource) {
return Observable.defer(new Func0<Observable<Boolean>>() {
@Override
public Observable<Boolean> call() {
Status status = driveResource.delete(mClient).await();
if (status.isSuccess()) {
return Observable.just(true);
} else {
return Observable.error(new RxDriveException(status));
}
}
});
}
/**
* Trashes a resource
*
* @param driveResource the resource to put in the trash
* @return true if the operation succeeds
*/
public Observable<Boolean> trash(final DriveResource driveResource) {
return Observable.defer(new Func0<Observable<Boolean>>() {
@Override
public Observable<Boolean> call() {
Status status = driveResource.trash(mClient).await();
if (status.isSuccess()) {
return Observable.just(true);
} else {
return Observable.error(new RxDriveException(status));
}
}
});
}
/**
* Untrashes a resource
*
* @param driveResource the resource to remove from the trash
* @return true if the operation succeeds
*/
public Observable<Boolean> untrash(final DriveResource driveResource) {
return Observable.defer(new Func0<Observable<Boolean>>() {
@Override
public Observable<Boolean> call() {
Status status = driveResource.untrash(mClient).await();
if (status.isSuccess()) {
return Observable.just(true);
} else {
return Observable.error(new RxDriveException(status));
}
}
});
}
/**
     * Requests a synchronization with Google Drive
     *
     * @return an Observable that emits a single null value once the sync request completes
*/
public Observable<Void> sync() {
return Observable.defer(new Func0<Observable<Void>>() {
@Override
public Observable<Void> call() {
Drive.DriveApi.requestSync(mClient).await();
return Observable.just(null);
}
});
}
/**
* Returns the Metadata of a DriveResource
*
* @param driveResource the resource you want the Metadata
* @return the Metadata of the driveResource
*/
public Observable<Metadata> getMetadata(final DriveResource driveResource) {
return Observable.defer(new Func0<Observable<Metadata>>() {
@Override
public Observable<Metadata> call() {
DriveResource.MetadataResult result = driveResource.getMetadata(mClient).await();
if (result.getStatus().isSuccess()) {
return Observable.just(result.getMetadata());
} else {
return Observable.error(new RxDriveException(result.getStatus()));
}
}
});
}
/**
* Open a driveId
*
* @param driveId the file to open
* @return the InputStream of the content
*/
public Observable<InputStream> open(DriveId driveId) {
return open(driveId, null);
}
/**
* Open a driveId
*
* @param driveId the file to open
* @param progressSubscriber the subscriber that listen for download progress
* @return the InputStream of the content
*/
public Observable<InputStream> open(final DriveId driveId,
final Subscriber<Progress> progressSubscriber) {
return Observable.defer(new Func0<Observable<InputStream>>() {
@Override
public Observable<InputStream> call() {
DriveApi.DriveContentsResult result = driveId.asDriveFile().open(
mClient,
DriveFile.MODE_READ_ONLY,
new DriveFile.DownloadProgressListener() {
@Override
public void onProgress(long bytesDownloaded, long bytesExpected) {
if (progressSubscriber != null) {
Log.d("maccio", "onProgress: " + bytesDownloaded + " " + bytesExpected);
progressSubscriber.onNext(
new Progress(bytesDownloaded, bytesExpected));
}
}
})
.await();
if (result.getStatus().isSuccess()) {
if (progressSubscriber != null) {
progressSubscriber.onCompleted();
}
return Observable.just(result.getDriveContents().getInputStream());
} else {
return Observable.error(new RxDriveException(result.getStatus()));
}
}
});
}
/**
* Tries to resolve GoogleApiClient connection failed
*
* @param activity the current activity
* @param result the connection result of GoogleApiClient
*/
public void resolveConnection(Activity activity, ConnectionResult result) {
if (result.hasResolution()) {
try {
result.startResolutionForResult(activity, RESOLVE_CONNECTION_REQUEST_CODE);
} catch (IntentSender.SendIntentException e) {
mConnectionStatePublishSubject.onNext(ConnectionState.unableToResolve(result));
}
} else {
GoogleApiAvailability.getInstance()
.getErrorDialog(activity, result.getErrorCode(), NO_RESOLUTION_REQUEST_CODE)
.show();
}
}
/**
* You should call this method in your onActivityResult if you tried to resolve connection
* with RxDrive using method resolveConnection.
*
* @param requestCode request code of onActivityResult
* @param resultCode result code of onActivityResult
* @param data intent of onActivityResult
* @see #resolveConnection(Activity, ConnectionResult)
*/
public void onActivityResult(int requestCode, int resultCode, Intent data) {
switch (requestCode) {
case RESOLVE_CONNECTION_REQUEST_CODE:
case NO_RESOLUTION_REQUEST_CODE:
if (resultCode == Activity.RESULT_OK) {
connect();
}
break;
}
}
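    /*
     * Integration sketch (illustrative; MyActivity is an assumption): forward the
     * Activity callback so that a successfully resolved connection is retried.
     *
     *   // inside MyActivity
     *   @Override
     *   protected void onActivityResult(int requestCode, int resultCode, Intent data) {
     *       super.onActivityResult(requestCode, resultCode, data);
     *       rxDrive.onActivityResult(requestCode, resultCode, data);
     *   }
     */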
private ContentResolver getContentResolver() {
return getContext()
.getContentResolver();
}
private Context getContext() {
return mClient.getContext();
}
}
|
package net.binggl.ninja.tests;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import java.io.IOException;
import java.net.UnknownHostException;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import de.flapdoodle.embed.mongo.MongodExecutable;
import de.flapdoodle.embed.mongo.MongodProcess;
import de.flapdoodle.embed.mongo.MongodStarter;
import de.flapdoodle.embed.mongo.config.IMongodConfig;
import de.flapdoodle.embed.mongo.config.MongodConfigBuilder;
import de.flapdoodle.embed.mongo.config.Net;
import de.flapdoodle.embed.mongo.distribution.Version;
import de.flapdoodle.embed.process.runtime.Network;
import models.TestModel;
import net.binggl.ninja.mongodb.MongoDB;
import ninja.NinjaTest;
public class TestMorphiaModule extends NinjaTest {
private static final boolean EMBEDDED_MONGO = true;
private static MongoDB mongoDB;
private static final MongodStarter starter = MongodStarter.getDefaultInstance();
private static MongodExecutable mongodExe;
private static MongodProcess mongod;
@BeforeClass
public static void init() throws UnknownHostException, IOException {
if(EMBEDDED_MONGO) {
IMongodConfig mongodConfig = new MongodConfigBuilder()
.version(Version.Main.PRODUCTION)
.net(new Net(29019, Network.localhostIsIPv6()))
.build();
mongodExe = starter.prepare(mongodConfig);
mongod = mongodExe.start();
}
}
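    // The embedded MongoDB listens on localhost:29019; the test configuration for
    // the MongoDB module is assumed to point at that host and port so the injected
    // MongoDB instance talks to this embedded process rather than a real server.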
@AfterClass
public static void shutdown() {
if(EMBEDDED_MONGO) {
mongod.stop();
mongodExe.stop();
}
}
@Before
public void setup() throws Exception{
mongoDB = getInjector().getInstance(MongoDB.class);
mongoDB.deleteAll(TestModel.class);
//mongoDB.dropDatabase();
}
@Test
public void testInit() {
assertNotNull(mongoDB.getMongoClient());
assertNotNull(mongoDB.getMorphia());
assertNotNull(mongoDB.getDatastore());
}
@Test
public void testInsertAndFindAll() {
mongoDB.save(new TestModel("foo"));
assertEquals(1, mongoDB.findAll(TestModel.class).size());
mongoDB.dropDatabase();
assertEquals(0, mongoDB.findAll(TestModel.class).size());
}
@Test
public void testFindById() {
mongoDB.save(new TestModel("foo"));
TestModel testModel = mongoDB.getDatastore().find(TestModel.class).field("name").equal("foo").get();
assertNotNull(mongoDB.findById(testModel.getId(), TestModel.class));
}
@Test
public void testCountAll() {
mongoDB.save(new TestModel("foo"));
mongoDB.save(new TestModel("bar"));
mongoDB.save(new TestModel("bla"));
assertEquals(3, mongoDB.countAll(TestModel.class));
}
@Test
public void testDeleteAll() {
mongoDB.save(new TestModel("foo"));
mongoDB.save(new TestModel("bar"));
mongoDB.save(new TestModel("bla"));
assertEquals(3, mongoDB.countAll(TestModel.class));
mongoDB.deleteAll(TestModel.class);
assertEquals(0, mongoDB.countAll(TestModel.class));
}
}
|
package org.jenkinsci.plugins.lucene.search.databackend;
import com.google.common.collect.TreeMultimap;
import hudson.model.AbstractBuild;
import hudson.model.BallColor;
import hudson.model.Cause;
import hudson.model.Job;
import jenkins.model.Jenkins;
import org.apache.commons.io.IOUtils;
import org.apache.commons.io.output.ByteArrayOutputStream;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.analysis.util.CharArraySet;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.LongField;
import org.apache.lucene.document.StringField;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.Term;
import org.apache.lucene.queryparser.classic.MultiFieldQueryParser;
import org.apache.lucene.queryparser.classic.ParseException;
import org.apache.lucene.queryparser.classic.QueryParser;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.NumericRangeQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.TopScoreDocCollector;
import org.apache.lucene.search.highlight.Highlighter;
import org.apache.lucene.search.highlight.InvalidTokenOffsetsException;
import org.apache.lucene.search.highlight.QueryTermScorer;
import org.apache.lucene.search.highlight.SimpleHTMLFormatter;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.util.Version;
import org.jenkinsci.plugins.lucene.search.Field;
import org.jenkinsci.plugins.lucene.search.FreeTextSearchExtension;
import org.jenkinsci.plugins.lucene.search.FreeTextSearchItemImplementation;
import org.jenkinsci.plugins.lucene.search.config.SearchBackendEngine;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.logging.Logger;
import static org.jenkinsci.plugins.lucene.search.Field.BALL_COLOR;
import static org.jenkinsci.plugins.lucene.search.Field.BUILD_NUMBER;
import static org.jenkinsci.plugins.lucene.search.Field.CONSOLE;
import static org.jenkinsci.plugins.lucene.search.Field.ID;
import static org.jenkinsci.plugins.lucene.search.Field.PROJECT_NAME;
import static org.jenkinsci.plugins.lucene.search.Field.START_TIME;
import static org.jenkinsci.plugins.lucene.search.Field.getIndex;
public class LuceneSearchBackend extends SearchBackend {
private static final Logger LOGGER = Logger.getLogger(LuceneSearchBackend.class.getName());
private static final int MAX_NUM_FRAGMENTS = 5;
private static final String[] EMPTY_ARRAY = new String[0];
private static final Locale LOCALE = Locale.ENGLISH;
private static final org.apache.lucene.document.Field.Store DONT_STORE = org.apache.lucene.document.Field.Store.NO;
private static final org.apache.lucene.document.Field.Store STORE = org.apache.lucene.document.Field.Store.YES;
private static final Comparator<Float> FLOAT_COMPARATOR = new Comparator<Float>() {
@Override
public int compare(Float o1, Float o2) {
return o2.compareTo(o1);
}
};
private static final Comparator<Document> START_TIME_COMPARATOR = new Comparator<Document>() {
private Long getStartTime(Document o) {
IndexableField field = o.getField(START_TIME.fieldName);
if (field != null) {
return field.numericValue().longValue();
}
                return 0L;
}
@Override
public int compare(Document o1, Document o2) {
return getStartTime(o2).compareTo(getStartTime(o1));
}
};
private static final Version LUCENE_VERSION = Version.LUCENE_4_9;
private static final int MAX_HITS_PER_PAGE = 100;
private final Directory index;
private final Analyzer analyzer;
private final IndexWriter dbWriter;
private final File indexPath;
private DirectoryReader reader;
public LuceneSearchBackend(final File indexPath) throws IOException {
super(SearchBackendEngine.LUCENE);
this.indexPath = indexPath;
analyzer = new StandardAnalyzer(LUCENE_VERSION, CharArraySet.EMPTY_SET);
index = FSDirectory.open(indexPath);
IndexWriterConfig config = new IndexWriterConfig(LUCENE_VERSION, analyzer);
dbWriter = new IndexWriter(index, config);
updateReader();
}
public static LuceneSearchBackend create(final Map<String, Object> config) {
try {
return new LuceneSearchBackend(getIndexPath(config));
} catch (IOException e) {
throw new RuntimeException(e);
}
}
private static File getIndexPath(final Map<String, Object> config) {
return (File) config.get("lucenePath");
}
@Override
public SearchBackend reconfigure(final Map<String, Object> newConfig) {
if (getIndexPath(newConfig).equals(indexPath)) {
return this;
} else {
close();
return create(newConfig);
}
}
public synchronized void close() {
IOUtils.closeQuietly(dbWriter);
IOUtils.closeQuietly(index);
}
private void updateReader() throws IOException {
dbWriter.commit();
reader = DirectoryReader.open(index);
}
    private Long getWithDefault(String number, Long defaultNumber) {
        if (number != null) {
            try {
                // Parse the literal value; Long.getLong() would read a system property instead.
                return Long.valueOf(number);
            } catch (NumberFormatException e) {
                return defaultNumber;
            }
        }
        return defaultNumber;
    }
@Override
public List<FreeTextSearchItemImplementation> getHits(String query, boolean includeHighlights) {
List<FreeTextSearchItemImplementation> luceneSearchResultImpl = new ArrayList<FreeTextSearchItemImplementation>();
try {
MultiFieldQueryParser queryParser = getQueryParser();
Query q = queryParser.parse(query).rewrite(reader);
IndexSearcher searcher = new IndexSearcher(reader);
TopScoreDocCollector collector = TopScoreDocCollector.create(MAX_HITS_PER_PAGE, true);
QueryTermScorer scorer = new QueryTermScorer(q);
Highlighter highlighter = new Highlighter(new SimpleHTMLFormatter(), scorer);
searcher.search(q, collector);
ScoreDoc[] hits = collector.topDocs().scoreDocs;
TreeMultimap<Float, Document> docs = TreeMultimap.create(FLOAT_COMPARATOR, START_TIME_COMPARATOR);
for (ScoreDoc hit : hits) {
Document doc = searcher.doc(hit.doc);
docs.put(hit.score, doc);
}
for (Document doc : docs.values()) {
String[] bestFragments = EMPTY_ARRAY;
if (includeHighlights) {
try {
bestFragments = highlighter.getBestFragments(analyzer, CONSOLE.fieldName,
doc.get(CONSOLE.fieldName), MAX_NUM_FRAGMENTS);
} catch (InvalidTokenOffsetsException e) {
LOGGER.warning("Failed to find bestFragments: " + e);
}
}
BallColor buildIcon = BallColor.GREY;
String colorName = doc.get(BALL_COLOR.fieldName);
if (colorName != null) {
buildIcon = BallColor.valueOf(colorName);
}
luceneSearchResultImpl.add(new FreeTextSearchItemImplementation(doc.get(PROJECT_NAME.fieldName), doc
.get(BUILD_NUMBER.fieldName), bestFragments, buildIcon.getImage()));
}
        } catch (ParseException e) {
            LOGGER.fine("Could not parse query '" + query + "': " + e);
        } catch (IOException e) {
            LOGGER.warning("Searching the Lucene index failed: " + e);
        }
return luceneSearchResultImpl;
}
private MultiFieldQueryParser getQueryParser() {
MultiFieldQueryParser queryParser = new MultiFieldQueryParser(LUCENE_VERSION, getAllDefaultSearchableFields(),
analyzer) {
@Override
protected Query getRangeQuery(String field, String part1, String part2, boolean startInclusive,
boolean endInclusive) throws ParseException {
if (field != null && getIndex(field).numeric) {
Long min = getWithDefault(part1, null);
Long max = getWithDefault(part2, null);
return NumericRangeQuery.newLongRange(field, min, max, true, true);
} else if (field != null) {
return new TermQuery(new Term(field));
}
return super.getRangeQuery(null, part1, part2, startInclusive, endInclusive);
}
};
queryParser.setDefaultOperator(QueryParser.Operator.AND);
queryParser.setLocale(LOCALE);
queryParser.setAnalyzeRangeTerms(true);
queryParser.setLowercaseExpandedTerms(true);
return queryParser;
}
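    // Illustrative examples (not part of the original source) of query strings the parser
    // above accepts, assuming "projectname", "console" and "buildnumber" are among the
    // default searchable fields:
    //
    //   projectname:my-job console:"NullPointerException"
    //   buildnumber:[100 TO 200]        // routed through the numeric range override
    //
    // Individual terms are combined with AND because of QueryParser.Operator.AND.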
@Override
public void storeBuild(final AbstractBuild<?, ?> build) throws IOException {
ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
build.getLogText().writeLogTo(0, byteArrayOutputStream);
String consoleOutput = byteArrayOutputStream.toString();
try {
Document doc = new Document();
doc.add(new StringField(Field.ID.fieldName, build.getId(), STORE));
doc.add(new TextField(Field.PROJECT_NAME.fieldName, build.getProject().getName(), STORE));
doc.add(new TextField(Field.PROJECT_DISPLAY_NAME.fieldName, build.getProject().getDisplayName(), STORE));
doc.add(new LongField(Field.BUILD_NUMBER.fieldName, build.getNumber(), STORE));
doc.add(new TextField(Field.RESULT.fieldName, build.getResult().toString(), STORE));
doc.add(new LongField(Field.DURATION.fieldName, build.getDuration(), DONT_STORE));
doc.add(new LongField(Field.START_TIME.fieldName, build.getStartTimeInMillis(), STORE));
doc.add(new TextField(Field.BUILT_ON.fieldName, build.getBuiltOnStr(), DONT_STORE));
StringBuilder shortDescriptions = new StringBuilder();
for (Cause cause : build.getCauses()) {
shortDescriptions.append(" ").append(cause.getShortDescription());
}
doc.add(new TextField(Field.START_CAUSE.fieldName, shortDescriptions.toString(), DONT_STORE));
doc.add(new StringField(Field.BALL_COLOR.fieldName, build.getIconColor().name(), STORE));
// TODO Add the following data
// build.getChangeSet()
// build.getCulprits()
// EnvVars a = build.getEnvironment(listener);
// build.get
// build.getArtifacts()
doc.add(new TextField(Field.CONSOLE.fieldName, consoleOutput, STORE));
for (FreeTextSearchExtension extension : FreeTextSearchExtension.all()) {
doc.add(new TextField(extension.getKeyword(), extension.getTextResult(build),
(extension.isPersist()) ? STORE : DONT_STORE));
}
dbWriter.addDocument(doc);
} finally {
updateReader();
}
}
@Override
public void removeBuild(final AbstractBuild<?, ?> build) {
try {
dbWriter.deleteDocuments(new Term(Field.ID.fieldName, build.getId()));
updateReader();
} catch (IOException e) {
throw new RuntimeException(e);
}
}
@SuppressWarnings("rawtypes")
@Override
public void cleanDeletedBuilds(Progress progress, Job job) {
try {
int firstBuildNumber = job.getFirstBuild().getNumber();
IndexSearcher searcher = new IndexSearcher(reader);
Term term = new Term(Field.PROJECT_NAME.fieldName, job.getName().toLowerCase(LOCALE));
Query q = new TermQuery(term).rewrite(reader);
TopDocs topDocs = searcher.search(q, 9999999);
for (int i = 0; i < topDocs.scoreDocs.length; i++) {
Document doc = searcher.doc(topDocs.scoreDocs[i].doc);
progress.setMax(reader.maxDoc());
progress.setCurrent(i);
Integer buildNumber = Integer.valueOf(doc.get(BUILD_NUMBER.fieldName));
if (firstBuildNumber > buildNumber) {
String id = doc.get(ID.fieldName);
dbWriter.deleteDocuments(new Term(ID.fieldName, id));
}
}
progress.setSuccessfullyCompleted();
updateReader();
} catch (IOException e) {
progress.completedWithErrors(e);
} finally {
progress.setFinished();
}
}
@Override
public void deleteJob(String jobName) {
try {
Term term = new Term(PROJECT_NAME.fieldName, jobName.toLowerCase(LOCALE));
dbWriter.deleteDocuments(term);
updateReader();
} catch (IOException e) {
            LOGGER.warning("Failed to delete job '" + jobName + "' from the index: " + e);
}
}
@SuppressWarnings("rawtypes")
@Override
public void cleanDeletedJobs(Progress progress) {
try {
Set<String> jobNames = new HashSet<String>();
for (Job job : Jenkins.getInstance().getAllItems(Job.class)) {
jobNames.add(job.getName());
}
progress.setMax(jobNames.size());
IndexSearcher searcher = new IndexSearcher(reader);
DistinctCollector distinctCollector = new DistinctCollector(PROJECT_NAME.fieldName, searcher);
searcher.search(new MatchAllDocsQuery(), distinctCollector);
int i = 0;
for (String jobName : distinctCollector.getDistinctData()) {
progress.setCurrent(i);
if (!jobNames.contains(jobName)) {
deleteJob(jobName);
}
i++;
}
updateReader();
progress.setSuccessfullyCompleted();
} catch (IOException e) {
progress.completedWithErrors(e);
} finally {
progress.setFinished();
}
}
}
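// A minimal usage sketch (not part of the original source); the index path and the query
// string are assumptions for illustration only:
//
//   LuceneSearchBackend backend = new LuceneSearchBackend(new File("/var/lib/jenkins/lucene"));
//   List<FreeTextSearchItemImplementation> hits = backend.getHits("console:Exception", true);
//   backend.close();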
|
package net.continuumsecurity.junit;
import cucumber.api.CucumberOptions;
import cucumber.api.junit.Cucumber;
import net.continuumsecurity.scanner.ZapManager;
import net.continuumsecurity.web.drivers.DriverFactory;
import org.junit.AfterClass;
import org.junit.runner.RunWith;
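/**
 * JUnit entry point that runs every Cucumber feature under src/test/resources/features/,
 * skipping scenarios tagged @skip, and shuts down the WebDriver instances and the ZAP
 * proxy once the suite has finished.
 */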
@RunWith(Cucumber.class)
@CucumberOptions(
features = {
"src/test/resources/features/"
},
format = {"pretty", "html:build/reports/cucumber/html", "json:build/reports/cucumber/all_tests.json", "junit:build/reports/junit/all_tests.xml"},
glue = {"net.continuumsecurity.steps"},
tags = {"~@skip"}
)
public class SecurityTest {
@AfterClass
public static void tearDown() {
DriverFactory.quitAll();
ZapManager.getInstance().stopZap();
}
}
|
package arez.processor;
import java.util.Collection;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.annotation.processing.ProcessingEnvironment;
import javax.lang.model.element.Element;
import javax.lang.model.element.ExecutableElement;
import javax.lang.model.element.Modifier;
import javax.lang.model.element.PackageElement;
import javax.lang.model.element.TypeElement;
import javax.lang.model.type.TypeKind;
import javax.tools.Diagnostic;
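/**
 * Static helper methods used by the annotation processor to validate that annotated
 * elements carry the modifiers and signatures the framework requires, raising a
 * ProcessorException describing the violation otherwise.
 */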
@SuppressWarnings( { "SameParameterValue", "WeakerAccess", "unused" } )
final class MemberChecks
{
private MemberChecks()
{
}
/**
* Verifies that the method is not final, static, abstract or private.
* The intent is to verify that it can be overridden and wrapped in a sub-class in the same package.
*/
static void mustBeWrappable( @Nonnull final TypeElement targetType,
@Nonnull final String scopeAnnotationName,
@Nonnull final String annotationName,
@Nonnull final Element element )
throws ProcessorException
{
mustBeOverridable( targetType, scopeAnnotationName, annotationName, element );
mustNotBeAbstract( annotationName, element );
}
/**
* Verifies that the method is not final, static or abstract.
   * The intent is to verify that it can be overridden by a sub-class in the same package.
*/
static void mustBeOverridable( @Nonnull final TypeElement targetType,
@Nonnull final String scopeAnnotationName,
@Nonnull final String annotationName,
@Nonnull final Element element )
throws ProcessorException
{
mustNotBeFinal( annotationName, element );
mustBeSubclassCallable( targetType, scopeAnnotationName, annotationName, element );
}
/**
* Verifies that the method is not static, abstract or private.
   * The intent is to verify that it can be called on an instance by a sub-class in the same package as the targetType.
*/
static void mustBeSubclassCallable( @Nonnull final TypeElement targetType,
@Nonnull final String scopeAnnotationName,
@Nonnull final String annotationName,
@Nonnull final Element element )
throws ProcessorException
{
mustNotBeStatic( annotationName, element );
mustNotBePrivate( annotationName, element );
mustNotBePackageAccessInDifferentPackage( targetType, scopeAnnotationName, annotationName, element );
}
static void mustBeStaticallySubclassCallable( @Nonnull final TypeElement targetType,
@Nonnull final String scopeAnnotationName,
@Nonnull final String annotationName,
@Nonnull final Element method )
throws ProcessorException
{
mustBeStatic( annotationName, method );
mustNotBePrivate( annotationName, method );
mustNotBePackageAccessInDifferentPackage( targetType, scopeAnnotationName, annotationName, method );
}
/**
   * Verifies that the method follows the conventions of a lifecycle hook.
   * The intent is to verify that it can be called on an instance by a sub-class in the same
   * package at a lifecycle stage. It must not throw exceptions, return a value or accept
   * parameters.
*/
static void mustBeLifecycleHook( @Nonnull final TypeElement targetType,
@Nonnull final String scopeAnnotationName,
@Nonnull final String annotationName,
@Nonnull final ExecutableElement method )
throws ProcessorException
{
mustNotBeAbstract( annotationName, method );
mustBeSubclassCallable( targetType, scopeAnnotationName, annotationName, method );
mustNotHaveAnyParameters( annotationName, method );
mustNotReturnAnyValue( annotationName, method );
mustNotThrowAnyExceptions( annotationName, method );
}
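  // Illustrative sketch (not part of the original source): a processor validating a
  // hypothetical @PostConstruct-style hook might invoke the check like this:
  //
  //   MemberChecks.mustBeLifecycleHook( componentType,
  //                                     "arez.annotations.ArezComponent",
  //                                     "arez.annotations.PostConstruct",
  //                                     hookMethod );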
static void mustBeStatic( @Nonnull final String annotationName, @Nonnull final Element element )
throws ProcessorException
{
if ( !element.getModifiers().contains( Modifier.STATIC ) )
{
throw new ProcessorException( must( annotationName, "be static" ), element );
}
}
static void mustNotBeStatic( @Nonnull final String annotationName, @Nonnull final Element element )
throws ProcessorException
{
if ( element.getModifiers().contains( Modifier.STATIC ) )
{
throw new ProcessorException( mustNot( annotationName, "be static" ), element );
}
}
static void mustBeAbstract( @Nonnull final String annotationName, @Nonnull final Element element )
throws ProcessorException
{
if ( !element.getModifiers().contains( Modifier.ABSTRACT ) )
{
throw new ProcessorException( must( annotationName, "be abstract" ), element );
}
}
static void mustNotBeAbstract( @Nonnull final String annotationName, @Nonnull final Element element )
throws ProcessorException
{
if ( element.getModifiers().contains( Modifier.ABSTRACT ) )
{
throw new ProcessorException( mustNot( annotationName, "be abstract" ), element );
}
}
static void mustBeFinal( @Nonnull final String annotationName, @Nonnull final Element element )
throws ProcessorException
{
if ( !element.getModifiers().contains( Modifier.FINAL ) )
{
throw new ProcessorException( must( annotationName, "be final" ), element );
}
}
static void mustNotBeFinal( @Nonnull final String annotationName, @Nonnull final Element element )
throws ProcessorException
{
if ( element.getModifiers().contains( Modifier.FINAL ) )
{
throw new ProcessorException( mustNot( annotationName, "be final" ), element );
}
}
static void mustBePublic( @Nonnull final String annotationName, @Nonnull final Element element )
throws ProcessorException
{
if ( !element.getModifiers().contains( Modifier.PUBLIC ) )
{
throw new ProcessorException( must( annotationName, "be public" ), element );
}
}
static void mustNotBePublic( @Nonnull final String annotationName, @Nonnull final Element element )
throws ProcessorException
{
if ( element.getModifiers().contains( Modifier.PUBLIC ) )
{
throw new ProcessorException( mustNot( annotationName, "be public" ), element );
}
}
static void mustBePrivate( @Nonnull final String annotationName, @Nonnull final Element element )
throws ProcessorException
{
if ( !element.getModifiers().contains( Modifier.PRIVATE ) )
{
throw new ProcessorException( must( annotationName, "be private" ), element );
}
}
static void mustNotBePrivate( @Nonnull final String annotationName, @Nonnull final Element element )
throws ProcessorException
{
if ( element.getModifiers().contains( Modifier.PRIVATE ) )
{
throw new ProcessorException( mustNot( annotationName, "be private" ), element );
}
}
static void mustNotBePackageAccessInDifferentPackage( @Nonnull final TypeElement element,
@Nonnull final String scopeAnnotationName,
@Nonnull final String annotationName,
@Nonnull final Element other )
throws ProcessorException
{
final Set<Modifier> modifiers = other.getModifiers();
final boolean isPackageAccess =
!modifiers.contains( Modifier.PRIVATE ) &&
!modifiers.contains( Modifier.PROTECTED ) &&
!modifiers.contains( Modifier.PUBLIC );
if ( isPackageAccess )
{
final PackageElement packageElement = GeneratorUtil.getPackageElement( element );
final PackageElement otherPackageElement =
GeneratorUtil.getPackageElement( (TypeElement) other.getEnclosingElement() );
if ( !Objects.equals( packageElement.getQualifiedName(), otherPackageElement.getQualifiedName() ) )
{
throw new ProcessorException( mustNot( annotationName,
"be package access if the " +
( other instanceof ExecutableElement ? "method" : "field" ) +
" is in a different package from the type annotated with the " +
toSimpleName( scopeAnnotationName ) + " annotation" ),
other );
}
}
}
static void mustNotHaveAnyParameters( @Nonnull final String annotationName, @Nonnull final ExecutableElement method )
throws ProcessorException
{
if ( !method.getParameters().isEmpty() )
{
throw new ProcessorException( mustNot( annotationName, "have any parameters" ), method );
}
}
static void mustNotReturnAnyValue( @Nonnull final String annotationName, @Nonnull final ExecutableElement method )
throws ProcessorException
{
if ( TypeKind.VOID != method.getReturnType().getKind() )
{
throw new ProcessorException( mustNot( annotationName, "return a value" ), method );
}
}
static void mustReturnAValue( @Nonnull final String annotationName, @Nonnull final ExecutableElement method )
throws ProcessorException
{
if ( TypeKind.VOID == method.getReturnType().getKind() )
{
throw new ProcessorException( must( annotationName, "return a value" ), method );
}
}
static void mustNotThrowAnyExceptions( @Nonnull final String annotationName,
@Nonnull final ExecutableElement method )
throws ProcessorException
{
if ( !method.getThrownTypes().isEmpty() )
{
throw new ProcessorException( mustNot( annotationName, "throw any exceptions" ), method );
}
}
/**
* Ensure that the element is not annotated with multiple annotations from the specified set.
   * The exceptions map lists annotation pairs that are permitted to appear together.
*
* @param element the element to check.
* @param annotations the set of annotation names that must not overlap.
   * @param exceptions the annotation names that are allowed to overlap.
*/
static void verifyNoOverlappingAnnotations( @Nonnull final Element element,
@Nonnull final Collection<String> annotations,
@Nonnull final Map<String, Collection<String>> exceptions )
throws ProcessorException
{
final String[] annotationTypes = annotations.toArray( new String[ 0 ] );
for ( int i = 0; i < annotationTypes.length; i++ )
{
final String type1 = annotationTypes[ i ];
final Object annotation1 = AnnotationsUtil.findAnnotationByType( element, type1 );
if ( null != annotation1 )
{
for ( int j = i + 1; j < annotationTypes.length; j++ )
{
final String type2 = annotationTypes[ j ];
if ( !isException( exceptions, type1, type2 ) )
{
final Object annotation2 = AnnotationsUtil.findAnnotationByType( element, type2 );
if ( null != annotation2 )
{
final String message =
"Method can not be annotated with both " + toSimpleName( type1 ) + " and " + toSimpleName( type2 );
throw new ProcessorException( message, element );
}
}
}
}
}
}
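  // Illustrative sketch (not part of the original source); the annotation names and the
  // permitted overlap are hypothetical:
  //
  //   MemberChecks.verifyNoOverlappingAnnotations(
  //     method,
  //     Arrays.asList( "example.A", "example.B", "example.C" ),
  //     Collections.singletonMap( "example.A", Collections.singletonList( "example.B" ) ) );
  //
  // Here A and B may appear together, but any other pairing raises a ProcessorException.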
private static boolean isException( @Nonnull final Map<String, Collection<String>> exceptions,
@Nonnull final String type1,
@Nonnull final String type2 )
{
return ( exceptions.containsKey( type1 ) && exceptions.get( type1 ).contains( type2 ) ) ||
exceptions.containsKey( type2 ) && exceptions.get( type2 ).contains( type1 );
}
@Nonnull
static String must( @Nonnull final String annotationName, @Nonnull final String message )
{
return toSimpleName( annotationName ) + " target must " + message;
}
@Nonnull
private static String mustNot( @Nonnull final String annotationName, @Nonnull final String message )
{
return must( annotationName, "not " + message );
}
@Nonnull
static String toSimpleName( @Nonnull final String annotationName )
{
return "@" + annotationName.replaceAll( ".*\\.", "" );
}
@Nonnull
static String suppressedBy( @Nonnull final String warning,
@Nullable final String alternativeSuppressWarnings )
{
return "This warning can be suppressed by annotating the element with " +
"@SuppressWarnings( \\\"" + warning + "\\\" )" +
( null == alternativeSuppressWarnings ?
"" :
" or " + toSimpleName( alternativeSuppressWarnings ) + "( \\\"" + warning + "\\\" )" );
}
static void shouldNotBePublic( @Nonnull final ProcessingEnvironment processingEnv,
@Nonnull final ExecutableElement method,
@Nonnull final String annotationName,
@Nonnull final String warning,
@Nullable final String alternativeSuppressWarnings )
{
if ( method.getModifiers().contains( Modifier.PUBLIC ) &&
!ProcessorUtil.isWarningSuppressed( method, warning, alternativeSuppressWarnings ) )
{
final String message =
toSimpleName( annotationName ) + " target should not be public. " +
suppressedBy( warning, alternativeSuppressWarnings );
processingEnv.getMessager().printMessage( Diagnostic.Kind.WARNING, message );
}
}
static void shouldNotBeProtected( @Nonnull final ProcessingEnvironment processingEnv,
@Nonnull final ExecutableElement method,
@Nonnull final String annotationName,
@Nonnull final String warning,
@Nullable final String alternativeSuppressWarnings )
{
if ( method.getModifiers().contains( Modifier.PROTECTED ) &&
!ProcessorUtil.isWarningSuppressed( method, warning, alternativeSuppressWarnings ) )
{
final String message =
toSimpleName( annotationName ) + " target should not be protected. " +
suppressedBy( warning, alternativeSuppressWarnings );
processingEnv.getMessager().printMessage( Diagnostic.Kind.WARNING, message );
}
}
}
|
package mb.scopegraph.oopsla20.diff;
import java.io.Serializable;
import java.util.Map.Entry;
import java.util.Set;
import io.usethesource.capsule.Map;
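/**
 * A bidirectional one-to-one map: each key maps to exactly one value and each value maps
 * back to exactly one key. Comes in an {@link Immutable} variant and a mutable
 * {@link Transient} variant backed by capsule maps.
 */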
public abstract class BiMap<E> {
public abstract boolean containsKey(E key);
public abstract boolean containsValue(E value);
public abstract boolean containsEntry(E key, E value);
public abstract Set<E> keySet();
public abstract Set<E> valueSet();
public abstract Set<Map.Entry<E, E>> entrySet();
public static class Immutable<E> extends BiMap<E> implements Serializable {
private static final long serialVersionUID = 42L;
private final Map.Immutable<E, E> fwd;
private final Map.Immutable<E, E> bwd;
private Immutable(Map.Immutable<E, E> fwd, Map.Immutable<E, E> bwd) {
this.fwd = fwd;
this.bwd = bwd;
}
@Override public boolean containsKey(E key) {
return fwd.containsKey(key);
}
@Override public boolean containsValue(E value) {
return bwd.containsKey(value);
}
@Override public boolean containsEntry(E key, E value) {
return fwd.containsKey(key) && fwd.get(key).equals(value);
}
public E getKeyOrDefault(E key, E def) {
return fwd.getOrDefault(key, def);
}
public E getValueOrDefault(E value, E def) {
return bwd.getOrDefault(value, def);
}
@Override public Set<E> keySet() {
return fwd.keySet();
}
@Override public Set<E> valueSet() {
return bwd.keySet();
}
@Override public Set<Entry<E, E>> entrySet() {
return fwd.entrySet();
}
public Immutable<E> putAll(BiMap<E> other) {
final Transient<E> newMap = this.melt();
newMap.putAll(other);
return newMap.freeze();
}
public boolean isEmpty() {
return fwd.isEmpty();
}
public Transient<E> melt() {
return new Transient<>(fwd.asTransient(), bwd.asTransient());
}
public Map.Immutable<E, E> asMap() {
return fwd;
}
@Override public String toString() {
return fwd.toString();
}
public static <E> Immutable<E> of() {
return new Immutable<>(Map.Immutable.of(), Map.Immutable.of());
}
public static <E> Immutable<E> of(E key, E value) {
return new Immutable<>(Map.Immutable.of(key, value), Map.Immutable.of(value, key));
}
public static <E> Immutable<E> from(BiMap<E> other) {
Transient<E> newMap = BiMap.Transient.of();
newMap.putAll(other);
return newMap.freeze();
}
@SuppressWarnings("unchecked") @Override public boolean equals(Object obj) {
if(obj == null || obj.getClass() != this.getClass()) {
return false;
}
return fwd.equals(((BiMap.Immutable<E>) obj).fwd);
}
@Override public int hashCode() {
return fwd.hashCode();
}
public boolean canPut(E key, E value) {
if(fwd.containsKey(key) && !fwd.get(key).equals(value)) {
return false;
}
if(bwd.containsKey(value) && !bwd.get(value).equals(key)) {
return false;
}
return true;
}
public Immutable<E> put(E key, E value) {
if(!canPut(key, value)) {
throw new IllegalArgumentException("Key or value already set.");
}
return new Immutable<>(fwd.__put(key, value), bwd.__put(value, key));
}
}
public static class Transient<E> extends BiMap<E> {
private final Map.Transient<E, E> fwd;
private final Map.Transient<E, E> bwd;
private Transient(Map.Transient<E, E> fwd, Map.Transient<E, E> bwd) {
this.fwd = fwd;
this.bwd = bwd;
}
@Override public boolean containsKey(E key) {
return fwd.containsKey(key);
}
@Override public boolean containsValue(E value) {
return bwd.containsKey(value);
}
@Override public boolean containsEntry(E key, E value) {
return fwd.containsKey(key) && fwd.get(key).equals(value);
}
@Override public Set<E> keySet() {
return fwd.keySet();
}
@Override public Set<E> valueSet() {
return bwd.keySet();
}
@Override public Set<Entry<E, E>> entrySet() {
return fwd.entrySet();
}
public boolean canPut(E key, E value) {
if(fwd.containsKey(key) && !fwd.get(key).equals(value)) {
return false;
}
if(bwd.containsKey(value) && !bwd.get(value).equals(key)) {
return false;
}
return true;
}
public void put(E key, E value) {
if(!canPut(key, value)) {
throw new IllegalArgumentException("Key or value already set.");
}
fwd.__put(key, value);
bwd.__put(value, key);
}
public void putAll(BiMap<E> other) {
putAll(other.entrySet());
}
public void putAll(Iterable<Entry<E, E>> entries) {
entries.forEach(e -> put(e.getKey(), e.getValue()));
}
public Immutable<E> freeze() {
return new Immutable<>(fwd.freeze(), bwd.freeze());
}
@Override public String toString() {
return fwd.toString();
}
public static <E> Transient<E> of() {
return new Transient<>(Map.Transient.of(), Map.Transient.of());
}
public static <E> Transient<E> of(E key, E value) {
return new Transient<>(Map.Transient.of(key, value), Map.Transient.of(value, key));
}
public E getKey(E key) {
return fwd.get(key);
}
public E getValue(E value) {
return bwd.get(value);
}
}
}
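// A minimal usage sketch (not part of the original source); the element values are
// illustrative only:
//
//   BiMap.Transient<String> mapping = BiMap.Transient.of();
//   mapping.put("s1", "t1");
//   BiMap.Immutable<String> frozen = mapping.freeze();
//   assert frozen.containsEntry("s1", "t1");   // true
//   assert !frozen.canPut("s1", "t2");         // "s1" is already bound to "t1"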
|
package com.nuclearthinking.game.obj;
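/**
 * Mutable player model holding the base attributes (strength, intelligence, agility and
 * stamina), hit points and level; levelling up is delegated to the assigned PlayerClass.
 */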
public class Player {
    private static final double DIFFICULTY = 1.0;
private int hitPoints = 100;
private PlayerClass pClass;
private int level = 1;
private String name;
private double strength = 10;
private double intelligence = 10;
private double agility = 10;
private double stamina = 10;
public Player() {
}
    public void levelUP() {
        if (getpClass() instanceof RogueClass) {
            RogueClass rClass = new RogueClass();
            rClass.levelUp(this);
            this.level++;
        } else if (getpClass() instanceof MageClass) {
            MageClass mClass = new MageClass();
            mClass.levelUp(this);
            this.level++;
        } else if (getpClass() instanceof WarriorClass) {
            WarriorClass wClass = new WarriorClass();
            wClass.levelUp(this);
            this.level++;
        } else {
            throw new RuntimeException("Object " + this.getName() + " has no class assigned: " + pClass);
        }
    }
protected void addStrenght(double amount) {
strength = strength + amount;
}
protected void addIntelegence(double amount) {
intelligence = intelligence + amount;
}
protected void addAgility(double amount) {
agility = agility + amount;
}
protected void addStamina(double amount) {
stamina = stamina + amount;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public int getLevel() {
return level;
}
public PlayerClass getpClass() {
return pClass;
}
public void setClass(PlayerClass pClass) {
this.pClass = pClass;
}
public int getHitPoints() {
return hitPoints;
}
public void setHitPoints(int hitPoints) {
this.hitPoints = hitPoints;
}
public double getStrength() {
return strength;
}
public double getIntelligence() {
return intelligence;
}
public double getAgility() {
return agility;
}
public double getStamina() {
return stamina;
}
}
|
package org.spongepowered.api.event.cause.entity.damage;
import org.spongepowered.api.effect.potion.PotionEffect;
import org.spongepowered.api.effect.potion.PotionEffectType;
import org.spongepowered.api.effect.potion.PotionEffectTypes;
import org.spongepowered.api.entity.Entity;
import org.spongepowered.api.entity.living.player.Player;
import org.spongepowered.api.event.cause.entity.damage.source.FallingBlockDamageSource;
import org.spongepowered.api.item.enchantment.Enchantment;
import org.spongepowered.api.item.enchantment.EnchantmentType;
import org.spongepowered.api.item.inventory.ItemStack;
import org.spongepowered.api.item.inventory.ItemStackSnapshot;
import org.spongepowered.api.util.generator.dummy.DummyObjectProvider;
import org.spongepowered.api.world.World;
import org.spongepowered.api.world.difficulty.Difficulty;
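/**
 * A catalogue of the {@link DamageModifierType}s supplied by the API.
 */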
public final class DamageModifierTypes {
// SORTFIELDS:ON
/**
* Represents a {@link DamageModifier} that "absorbs" damage based on
* the {@link PotionEffectTypes#ABSORPTION} level on the
* {@link Entity}.
*/
public static final DamageModifierType ABSORPTION = DummyObjectProvider.createFor(DamageModifierType.class, "ABSORPTION");
/**
* Represents a {@link DamageModifier} that will reduce damage based on
* the armor {@link ItemStack}s.
*/
public static final DamageModifierType ARMOR = DummyObjectProvider.createFor(DamageModifierType.class, "ARMOR");
/**
* Represents a {@link DamageModifier} that will reduce damage based on
* the {@link EnchantmentType}s applicable to an {@link ItemStack} that is
* considered to be "armor" currently equipped on the owner.
*
* <p>Usually, within the {@link DamageModifier#getCause()} will reside
* an {@link ItemStackSnapshot} and an {@link Enchantment} signifying
* that the {@link EnchantmentType} of the {@link ItemStack} is modifying the
* incoming/outgoing damage. There can be multiple {@link DamageModifier}s
* of this type in a single event due to the variety of possibilities in
* customization of armor handling.</p>
*/
public static final DamageModifierType ARMOR_ENCHANTMENT = DummyObjectProvider.createFor(DamageModifierType.class, "ARMOR_ENCHANTMENT");
/**
* Represents the {@link DamageModifier} that will reduce damage from a
* {@link Player} if their attack cooldown has not been completed yet.
*/
public static final DamageModifierType ATTACK_COOLDOWN = DummyObjectProvider.createFor(DamageModifierType.class, "ATTACK_COOLDOWN");
/**
* Represents the {@link DamageModifier} that will modify damage output
* based on the fact that the attacking source is critically hitting the
* target.
*/
public static final DamageModifierType CRITICAL_HIT = DummyObjectProvider.createFor(DamageModifierType.class, "CRITICAL_HIT");
/**
* Represents a {@link DamageModifier} that will reduce damage based on
* the {@link PotionEffectTypes#RESISTANCE} or any other
* {@link PotionEffectType} that can be deemed as reducing incoming damage.
*
* <p>Usually, within the {@link DamageModifier#getCause()} will reside
* a {@link PotionEffect} including the amplifier and duration, signifying
* that the {@link PotionEffectType} is modifying the incoming damage.</p>
*/
public static final DamageModifierType DEFENSIVE_POTION_EFFECT = DummyObjectProvider
.createFor(DamageModifierType.class, "DEFENSIVE_POTION_EFFECT");
/**
* Represents a {@link DamageModifier} that enhances damage based on the
* current {@link Difficulty} of the {@link World}.
*/
public static final DamageModifierType DIFFICULTY = DummyObjectProvider
.createFor(DamageModifierType.class, "DIFFICULTY");
/**
* Represents the {@link DamageModifier} that will modify damage from
* a {@link FallingBlockDamageSource}.
*
* <p>Usually, within the {@link DamageModifier#getCause()} will reside
* an {@link ItemStackSnapshot} and an {@link Enchantment} signifying
* that the {@link EnchantmentType} of the {@link ItemStack} is modifying the
* incoming/outgoing damage.</p>
*/
public static final DamageModifierType HARD_HAT = DummyObjectProvider.createFor(DamageModifierType.class, "HARD_HAT");
/**
* Represents a {@link DamageModifier} that will modify damage based on
* magic.
*/
public static final DamageModifierType MAGIC = DummyObjectProvider.createFor(DamageModifierType.class, "MAGIC");
/**
* Represents a {@link DamageModifier} that will reduce outgoing damage
* based on a {@link PotionEffect}.
*
* <p>Usually, within the {@link DamageModifier#getCause()} will reside
* a {@link PotionEffect} including the amplifier and duration, signifying
* that the {@link PotionEffectType} is reducing the outgoing damage.</p>
*/
public static final DamageModifierType NEGATIVE_POTION_EFFECT = DummyObjectProvider.createFor(DamageModifierType.class, "NEGATIVE_POTION_EFFECT");
/**
* Represents the {@link DamageModifier} that will increase damage from
* a {@link PotionEffect} affecting the attacker.
*/
public static final DamageModifierType OFFENSIVE_POTION_EFFECT = DummyObjectProvider
.createFor(DamageModifierType.class, "OFFENSIVE_POTION_EFFECT");
/**
* Represents a {@link DamageModifier} that will reduce damage due to
* using a shield.
*/
public static final DamageModifierType SHIELD = DummyObjectProvider.createFor(DamageModifierType.class, "SHIELD");
/**
* Represents a {@link DamageModifier} that is applied for a sweeping
* attack.
*/
public static final DamageModifierType SWEEPING = DummyObjectProvider.createFor(DamageModifierType.class, "SWEEPING");
/**
     * Represents a {@link DamageModifier} that is applied for a sweeping
* attack.
*
* @deprecated use {@link #SWEEPING}
*/
@Deprecated
public static final DamageModifierType SWEAPING = SWEEPING;
/**
* Represents the {@link DamageModifier} that will modify damage from
* an {@link EnchantmentType} on an equipped {@link ItemStack}.
*
* <p>Usually, within the {@link DamageModifier#getCause()} will reside
* an {@link ItemStackSnapshot} and an {@link Enchantment} signifying
* that the {@link EnchantmentType} of the {@link ItemStack} is modifying the
* incoming/outgoing damage.</p>
*/
public static final DamageModifierType WEAPON_ENCHANTMENT = DummyObjectProvider.createFor(DamageModifierType.class, "WEAPON_ENCHANTMENT");
// SORTFIELDS:OFF
// Suppress default constructor to ensure non-instantiability.
private DamageModifierTypes() {
throw new AssertionError("You should not be attempting to instantiate this class.");
}
}
|
package com.jme3.gde.core.assets;
import com.jme3.asset.AssetEventListener;
import com.jme3.asset.AssetKey;
import com.jme3.export.Savable;
import com.jme3.export.binary.BinaryExporter;
import com.jme3.gde.core.scene.ApplicationLogHandler.LogLevel;
import com.jme3.gde.core.scene.SceneApplication;
import java.io.IOException;
import java.io.OutputStream;
import java.lang.reflect.InvocationTargetException;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.commons.beanutils.BeanUtils;
import org.netbeans.api.progress.ProgressHandle;
import org.netbeans.api.progress.ProgressHandleFactory;
import org.netbeans.api.project.Project;
import org.netbeans.api.project.ProjectManager;
import org.openide.DialogDisplayer;
import org.openide.NotifyDescriptor;
import org.openide.cookies.SaveCookie;
import org.openide.filesystems.FileLock;
import org.openide.filesystems.FileObject;
import org.openide.loaders.DataNode;
import org.openide.loaders.DataObjectExistsException;
import org.openide.loaders.MultiDataObject;
import org.openide.loaders.MultiFileLoader;
import org.openide.nodes.Children;
import org.openide.nodes.Node;
import org.openide.util.Exceptions;
import org.openide.util.Lookup;
import org.openide.util.lookup.AbstractLookup;
import org.openide.util.lookup.InstanceContent;
import org.openide.util.lookup.ProxyLookup;
/**
*
* @author normenhansen
*/
@SuppressWarnings("unchecked")
public class AssetDataObject extends MultiDataObject {
protected static final Logger logger = Logger.getLogger(AssetDataObject.class.getName());
protected final InstanceContent lookupContents = new InstanceContent();
protected final AbstractLookup contentLookup;
protected final Lookup lookup;
protected final AssetData assetData;
protected final ProjectAssetManager assetManager;
protected final AssetListListener listListener;
protected final List<FileObject> assetList = new LinkedList<FileObject>();
protected final List<AssetKey> assetKeyList = new LinkedList<AssetKey>();
protected final List<AssetKey> failedList = new LinkedList<AssetKey>();
protected SaveCookie saveCookie = new SaveCookie() {
public void save() throws IOException {
//TODO: On OpenGL thread? -- safest way.. with get()?
SceneApplication.getApplication().enqueue(new Callable() {
public Object call() throws Exception {
saveAsset();
return null;
}
});
}
};
protected DataNode dataNode;
protected AssetKey assetKey;
protected Savable savable;
protected String saveExtension;
public AssetDataObject(FileObject pf, MultiFileLoader loader) throws DataObjectExistsException, IOException {
super(pf, loader);
contentLookup = new AbstractLookup(lookupContents);
assetData = new AssetData(this);
lookupContents.add(assetData);
lookup = new ProxyLookup(getCookieSet().getLookup(), contentLookup);
listListener = new AssetListListener(this, assetList, assetKeyList, failedList);
assetManager = findAssetManager();
        //register the save cookie; setSaveCookie() also resets the modified flag
setSaveCookie(saveCookie);
}
private ProjectAssetManager findAssetManager() {
FileObject file = getPrimaryFile();
ProjectManager pm = ProjectManager.getDefault();
while (file != null) {
if (file.isFolder() && pm.isProject(file)) {
try {
Project project = ProjectManager.getDefault().findProject(file);
if (project != null) {
ProjectAssetManager mgr = project.getLookup().lookup(ProjectAssetManager.class);
if (mgr != null) {
getLookupContents().add(mgr);
return mgr;
}
}
} catch (IOException ex) {
} catch (IllegalArgumentException ex) {
}
}
file = file.getParent();
}
return null;
}
@Override
protected Node createNodeDelegate() {
AssetDataNode node = new AssetDataNode(this, Children.LEAF, new ProxyLookup(getCookieSet().getLookup(), contentLookup));
node.setIconBaseWithExtension("com/jme3/gde/core/icons/jme-logo.png");
return node;
}
@Override
public synchronized void setModified(boolean modif) {
super.setModified(modif);
if (modif && saveCookie != null) {
getCookieSet().assign(SaveCookie.class, saveCookie);
} else {
getCookieSet().assign(SaveCookie.class);
}
}
@Override
public Lookup getLookup() {
return lookup;
}
public InstanceContent getLookupContents() {
return lookupContents;
}
public synchronized void setSaveCookie(SaveCookie cookie) {
this.saveCookie = cookie;
getCookieSet().assign(SaveCookie.class, saveCookie);
setModified(false);
}
    /**
     * Loads the asset from the DataObject via the ProjectAssetManager in the
     * lookup. If the asset has already been loaded the cached instance is returned;
     * call closeAsset() first to force a reload from disk.
     *
     * @return the loaded Savable, or null if it could not be loaded
     */
public synchronized Savable loadAsset() {
if (savable != null) {
return savable;
}
ProjectAssetManager mgr = getLookup().lookup(ProjectAssetManager.class);
if (mgr == null) {
DialogDisplayer.getDefault().notifyLater(new NotifyDescriptor.Message("File is not part of a project!\nCannot load without ProjectAssetManager."));
return null;
}
        //make sure it's actually closed and all data gets reloaded
closeAsset();
FileLock lock = null;
try {
lock = getPrimaryFile().lock();
listListener.start();
Savable spatial = (Savable) mgr.loadAsset(getAssetKey());
listListener.stop();
lock.releaseLock();
savable = spatial;
logger.log(Level.INFO, "Loaded asset {0}", getName());
} catch (Exception ex) {
Exceptions.printStackTrace(ex);
} finally {
if (lock != null) {
lock.releaseLock();
}
}
return savable;
}
    /**
     * Saves this asset. When a saveExtension is set, the data is written to a sibling
     * file with that extension instead of the primary file.
     *
     * @throws IOException if the asset could not be written
     */
public synchronized void saveAsset() throws IOException {
if (savable == null) {
logger.log(Level.WARNING, "Trying to write asset failed, asset data null!\nImport failed?");
return;
}
final Savable savable = this.savable;
ProgressHandle progressHandle = ProgressHandleFactory.createHandle("Saving File..");
progressHandle.start();
BinaryExporter exp = BinaryExporter.getInstance();
FileLock lock = null;
OutputStream out = null;
try {
if (saveExtension == null) {
out = getPrimaryFile().getOutputStream();
} else {
FileObject outFileObject = getPrimaryFile().getParent().getFileObject(getPrimaryFile().getName(), saveExtension);
if (outFileObject == null) {
outFileObject = getPrimaryFile().getParent().createData(getPrimaryFile().getName(), saveExtension);
}
out = outFileObject.getOutputStream();
outFileObject.getParent().refresh();
}
exp.save(savable, out);
} finally {
if (lock != null) {
lock.releaseLock();
}
if (out != null) {
out.close();
}
}
progressHandle.finish();
setModified(false);
logger.log(LogLevel.USERINFO, "File {0} saved successfully", getPrimaryFile().getNameExt());
}
    /**
     * Closes this asset and evicts it (and any assets loaded along with it) from the cache,
     * so that the next call to loadAsset reloads it from disk.
     */
public synchronized void closeAsset() {
ProjectAssetManager mgr = getLookup().lookup(ProjectAssetManager.class);
if (mgr != null && savable != null) {
logger.log(Level.INFO, "Closing asset {0}, deleting from cache.", getName());
mgr.deleteFromCache(getAssetKey());
//delete referenced assets too
for (Iterator<AssetKey> it = assetKeyList.iterator(); it.hasNext();) {
AssetKey assetKey1 = it.next();
logger.log(Level.INFO, "Removing linked asset {0}, from cache via main asset {1}.", new Object[]{assetKey1.getName(), getName()});
mgr.deleteFromCache(assetKey1);
}
savable = null;
} else if (mgr == null) {
logger.log(Level.WARNING, "Closing asset {0} with no ProjectAssetManager assigned..?", getName());
}
}
/**
* Returns the AssetKey of this asset type. When extending AssetDataObject
* or a subtype the class should override this so the key type and
* properties can be recognized properly:
* <pre>
* public synchronized MyKeyType getAssetKey() {
* //return key if already set
* if(super.getAssetKey() instanceof MyKeyType){
* return (MyKeyType)assetKey;
* }
* //set own key type and return
* assetKey = new MyKeyType(super.getAssetKey().getName());
* return (MyKeyType)assetKey;
* }
* </pre>
*
* @return
*/
public synchronized AssetKey<?> getAssetKey() {
if (assetKey == null) {
ProjectAssetManager mgr = getLookup().lookup(ProjectAssetManager.class);
if (mgr == null) {
return null;
}
            String assetPath = mgr.getRelativeAssetPath(getPrimaryFile().getPath());
            this.assetKey = new AssetKey<Object>(assetPath);
}
return assetKey;
}
    /**
     * Applies the supplied key's data to this asset's assetKey so the asset will be
     * loaded with these settings the next time loadAsset actually loads it from the
     * ProjectAssetManager.
     *
     * @param key the key whose properties are copied onto this asset's key
     */
public synchronized void setAssetKeyData(AssetKey key) {
try {
BeanUtils.copyProperties(getAssetKey(), key);
} catch (IllegalAccessException ex) {
Exceptions.printStackTrace(ex);
} catch (InvocationTargetException ex) {
Exceptions.printStackTrace(ex);
}
}
/**
     * Gets a list of FileObjects that represent all files that have been loaded
     * along with this asset. This includes textures for models as well as materials
     * and other files.
     *
     * @return a copy of the list of files loaded with this asset
*/
public synchronized List<FileObject> getAssetList() {
return new LinkedList<FileObject>(assetList);
}
/**
     * Gets a list of AssetKeys that represent all files that have been loaded
     * along with this asset. This includes textures for models as well as materials
     * and other files.
     *
     * @return a copy of the list of asset keys loaded with this asset
*/
public synchronized List<AssetKey> getAssetKeyList() {
return new LinkedList<AssetKey>(assetKeyList);
}
/**
     * Gets a list of AssetKeys that represent all files that failed to load for
     * this asset. These files were referenced during import but could not be found
     * by the AssetManager.
     *
     * @return a copy of the list of asset keys that failed to load
*/
public synchronized List<AssetKey> getFailedList() {
return new LinkedList<AssetKey>(failedList);
}
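    /**
     * Tracks the AssetKeys and FileObjects that are requested (or fail to resolve) while the
     * main asset is loading on the current thread, filling the owning AssetDataObject's
     * asset, key and failed lists.
     */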
protected static class AssetListListener implements AssetEventListener {
private AssetDataObject obj;
private List<FileObject> assetList;
private List<AssetKey> assetKeyList;
private List<AssetKey> failedList;
private Thread loadingThread;
public AssetListListener(AssetDataObject obj, List<FileObject> assetList, List<AssetKey> assetKeyList, List<AssetKey> failedList) {
this.obj = obj;
this.assetList = assetList;
this.assetKeyList = assetKeyList;
this.failedList = failedList;
}
public void assetLoaded(AssetKey ak) {
}
public void assetRequested(AssetKey ak) {
ProjectAssetManager pm = obj.getLookup().lookup(ProjectAssetManager.class);
if (pm == null || loadingThread != Thread.currentThread()) {
return;
}
FileObject fObj = pm.getAssetFileObject(ak);
if (fObj != null && !assetList.contains(fObj)) {
assetList.add(fObj);
assetKeyList.add(ak);
}
}
public void assetDependencyNotFound(AssetKey ak, AssetKey ak1) {
ProjectAssetManager pm = obj.getLookup().lookup(ProjectAssetManager.class);
if (pm == null || loadingThread != Thread.currentThread()) {
return;
}
FileObject fObj = pm.getAssetFileObject(ak1);
if (fObj != null && assetList.contains(fObj)) {
assetList.remove(fObj);
assetKeyList.remove(ak1);
}
if (!failedList.contains(ak1)) {
failedList.add(ak1);
}
}
public void start() {
ProjectAssetManager pm = obj.getLookup().lookup(ProjectAssetManager.class);
loadingThread = Thread.currentThread();
assetList.clear();
assetKeyList.clear();
failedList.clear();
if (pm == null) {
return;
}
pm.addAssetEventListener(this);
}
public void stop() {
ProjectAssetManager pm = obj.getLookup().lookup(ProjectAssetManager.class);
if (pm == null) {
return;
}
pm.removeAssetEventListener(this);
}
};
}
|
package com.cardshifter.gdx.ui.res;
import com.badlogic.gdx.scenes.scene2d.Actor;
import com.badlogic.gdx.scenes.scene2d.ui.Label;
import com.badlogic.gdx.scenes.scene2d.ui.Skin;
import java.util.Map;
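/**
 * Shows a single resource value as a label and colours it red when the value has dropped
 * below its original value, green when it has risen above it, and white when it is unchanged.
 */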
public class ColoredResView extends ResView {
private final Label label;
private final String key;
private final String originalKey;
private int original;
public ColoredResView(Skin skin, String key, Map<String, Object> properties) {
this(skin, key, (String) null);
Integer value = (Integer) properties.get(key);
this.original = value == null ? 0 : value;
}
public ColoredResView(Skin skin, String key, String originalKey) {
this.label = new Label("", skin);
this.key = key;
this.originalKey = originalKey;
}
@Override
public Actor getActor() {
return label;
}
@Override
public void update(Map<String, ? extends Object> properties) {
if (originalKey != null) {
Integer i = (Integer) properties.get(originalKey);
if (i != null) {
original = i;
}
}
Integer value = (Integer) properties.get(key);
if (value == null) {
return;
}
this.label.setText(String.valueOf(value));
if (value < original) {
this.label.setColor(1, 0, 0, 1);
}
else if (value > original) {
this.label.setColor(0, 1, 0, 1);
}
else {
this.label.setColor(1, 1, 1, 1);
}
}
}
|
package org.spongepowered.common.mixin.core.entity.player;
import com.google.common.collect.ImmutableList;
import com.mojang.authlib.GameProfile;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.entity.player.PlayerCapabilities;
import net.minecraft.inventory.Container;
import net.minecraft.scoreboard.Scoreboard;
import net.minecraft.util.BlockPos;
import net.minecraft.util.FoodStats;
import net.minecraft.util.IChatComponent;
import org.spongepowered.api.entity.Entity;
import org.spongepowered.api.event.SpongeEventFactory;
import org.spongepowered.api.event.cause.Cause;
import org.spongepowered.api.event.entity.CollideEntityEvent;
import org.spongepowered.api.util.annotation.NonnullByDefault;
import org.spongepowered.api.world.World;
import org.spongepowered.asm.mixin.Mixin;
import org.spongepowered.asm.mixin.Shadow;
import org.spongepowered.asm.mixin.injection.At;
import org.spongepowered.asm.mixin.injection.Redirect;
import org.spongepowered.common.Sponge;
import org.spongepowered.common.interfaces.IMixinEntityPlayer;
import org.spongepowered.common.mixin.core.entity.living.MixinEntityLivingBase;
import org.spongepowered.common.util.VecHelper;
import java.util.List;
@NonnullByDefault
@Mixin(EntityPlayer.class)
public abstract class MixinEntityPlayer extends MixinEntityLivingBase implements IMixinEntityPlayer {
@Shadow public Container inventoryContainer;
@Shadow public Container openContainer;
@Shadow public int experienceLevel;
@Shadow public int experienceTotal;
@Shadow public float experience;
@Shadow public PlayerCapabilities capabilities;
@Shadow public abstract int xpBarCap();
@Shadow public abstract FoodStats getFoodStats();
@Shadow public abstract GameProfile getGameProfile();
@Shadow public abstract IChatComponent getDisplayName();
@Shadow public abstract void addExperience(int amount);
@Shadow public abstract Scoreboard getWorldScoreboard();
@Shadow public abstract boolean isSpectator();
@Shadow private BlockPos spawnChunk;
@Shadow private BlockPos playerLocation;
@Shadow protected FoodStats foodStats;
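    // Redirects the vanilla entity-collision lookup during the player's living update so a
    // CollideEntityEvent can be fired; if the event is cancelled, the collision list is cleared
    // before it is handed back to vanilla code.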
@SuppressWarnings({"rawtypes", "unchecked"})
@Redirect(method = "onLivingUpdate", at = @At(value = "INVOKE", target="Lnet/minecraft/world/World;getEntitiesWithinAABBExcludingEntity(Lnet/minecraft/entity/Entity;Lnet/minecraft/util/AxisAlignedBB;)Ljava/util/List;"))
public List onGetEntitiesWithinAABB(net.minecraft.world.World world, net.minecraft.entity.Entity entity, net.minecraft.util.AxisAlignedBB aabb) {
List list = world.getEntitiesWithinAABBExcludingEntity(entity, aabb);
CollideEntityEvent event = SpongeEventFactory.createCollideEntityEvent(Sponge.getGame(), Cause.of(this), (List<Entity>)(List<?>)ImmutableList.copyOf(list), (List<Entity>)(List<?>)list, (World) this.worldObj);
Sponge.getGame().getEventManager().post(event);
if (event.isCancelled()) {
list.clear();
}
return list;
}
// utility method for getting the total experience at an arbitrary level
// the formulas here are basically (slightly modified) integrals of those of EntityPlayer#xpBarCap()
private int xpAtLevel(int level) {
if (level > 30) {
return (int) (4.5 * Math.pow(level, 2) - 162.5 * level + 2220);
} else if (level > 15) {
return (int) (2.5 * Math.pow(level, 2) - 40.5 * level + 360);
} else {
return (int) (Math.pow(level, 2) + 6 * level);
}
}
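    // Worked example of the formula above: xpAtLevel(30) = 2.5 * 30^2 - 40.5 * 30 + 360 = 1395,
    // while xpAtLevel(31) falls into the first branch: 4.5 * 31^2 - 162.5 * 31 + 2220 = 1507.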
public int getExperienceSinceLevel() {
return this.getTotalExperience() - xpAtLevel(this.getLevel());
}
public void setExperienceSinceLevel(int experience) {
this.setTotalExperience(xpAtLevel(this.experienceLevel) + experience);
}
public int getExperienceBetweenLevels() {
return this.xpBarCap();
}
public int getLevel() {
return this.experienceLevel;
}
public void setLevel(int level) {
this.experienceLevel = level;
}
public int getTotalExperience() {
return this.experienceTotal;
}
public void setTotalExperience(int exp) {
this.experienceTotal = exp;
}
public boolean isFlying() {
return this.capabilities.isFlying;
}
public void setFlying(boolean flying) {
this.capabilities.isFlying = flying;
}
@Redirect(method = "onUpdate", at = @At(value = "INVOKE", target = "Lnet/minecraft/entity/player/EntityPlayer;isPlayerSleeping()Z"))
public boolean onIsPlayerSleeping(EntityPlayer self) {
if (self.isPlayerSleeping()) {
if (!this.worldObj.isRemote) {
Sponge.getGame().getEventManager()
                        .post(SpongeEventFactory.createSleepingEventTick(Sponge.getGame(), Cause.of(this),
                                this.getWorld().createSnapshot(VecHelper.toVector(this.playerLocation)), this));
}
return true;
}
return false;
}
}
|
package com.smartdevicelink.proxy;
import java.io.BufferedReader;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.UnsupportedEncodingException;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.ProtocolException;
import java.net.URL;
import java.util.Hashtable;
import java.util.List;
import java.util.Vector;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import android.app.Service;
import android.content.Context;
import android.content.Intent;
import android.os.Environment;
import android.os.Handler;
import android.os.Looper;
import android.telephony.TelephonyManager;
import android.util.Log;
import android.util.SparseArray;
import com.smartdevicelink.Dispatcher.IDispatchingStrategy;
import com.smartdevicelink.Dispatcher.IncomingProtocolMessageComparitor;
import com.smartdevicelink.Dispatcher.InternalProxyMessageComparitor;
import com.smartdevicelink.Dispatcher.OutgoingProtocolMessageComparitor;
import com.smartdevicelink.Dispatcher.ProxyMessageDispatcher;
import com.smartdevicelink.SdlConnection.ISdlConnectionListener;
import com.smartdevicelink.SdlConnection.SdlConnection;
import com.smartdevicelink.SdlConnection.SdlSession;
import com.smartdevicelink.exception.SdlException;
import com.smartdevicelink.exception.SdlExceptionCause;
import com.smartdevicelink.marshal.JsonRPCMarshaller;
import com.smartdevicelink.protocol.ProtocolMessage;
import com.smartdevicelink.protocol.enums.FunctionID;
import com.smartdevicelink.protocol.enums.MessageType;
import com.smartdevicelink.protocol.enums.SessionType;
import com.smartdevicelink.protocol.heartbeat.HeartbeatMonitor;
import com.smartdevicelink.proxy.callbacks.InternalProxyMessage;
import com.smartdevicelink.proxy.callbacks.OnError;
import com.smartdevicelink.proxy.callbacks.OnProxyClosed;
import com.smartdevicelink.proxy.interfaces.IProxyListenerALM;
import com.smartdevicelink.proxy.interfaces.IProxyListenerBase;
import com.smartdevicelink.proxy.rpc.AddCommand;
import com.smartdevicelink.proxy.rpc.AddCommandResponse;
import com.smartdevicelink.proxy.rpc.AddSubMenu;
import com.smartdevicelink.proxy.rpc.AddSubMenuResponse;
import com.smartdevicelink.proxy.rpc.Alert;
import com.smartdevicelink.proxy.rpc.AlertResponse;
import com.smartdevicelink.proxy.rpc.AudioPassThruCapabilities;
import com.smartdevicelink.proxy.rpc.ButtonCapabilities;
import com.smartdevicelink.proxy.rpc.ChangeRegistration;
import com.smartdevicelink.proxy.rpc.ChangeRegistrationResponse;
import com.smartdevicelink.proxy.rpc.Choice;
import com.smartdevicelink.proxy.rpc.CreateInteractionChoiceSet;
import com.smartdevicelink.proxy.rpc.CreateInteractionChoiceSetResponse;
import com.smartdevicelink.proxy.rpc.DeleteCommand;
import com.smartdevicelink.proxy.rpc.DeleteCommandResponse;
import com.smartdevicelink.proxy.rpc.DeleteFile;
import com.smartdevicelink.proxy.rpc.DeleteFileResponse;
import com.smartdevicelink.proxy.rpc.DeleteInteractionChoiceSet;
import com.smartdevicelink.proxy.rpc.DeleteInteractionChoiceSetResponse;
import com.smartdevicelink.proxy.rpc.DeleteSubMenu;
import com.smartdevicelink.proxy.rpc.DeleteSubMenuResponse;
import com.smartdevicelink.proxy.rpc.DiagnosticMessageResponse;
import com.smartdevicelink.proxy.rpc.DisplayCapabilities;
import com.smartdevicelink.proxy.rpc.EndAudioPassThru;
import com.smartdevicelink.proxy.rpc.EndAudioPassThruResponse;
import com.smartdevicelink.proxy.rpc.GenericResponse;
import com.smartdevicelink.proxy.rpc.GetDTCsResponse;
import com.smartdevicelink.proxy.rpc.GetVehicleData;
import com.smartdevicelink.proxy.rpc.GetVehicleDataResponse;
import com.smartdevicelink.proxy.rpc.Headers;
import com.smartdevicelink.proxy.rpc.Image;
import com.smartdevicelink.proxy.rpc.ListFiles;
import com.smartdevicelink.proxy.rpc.ListFilesResponse;
import com.smartdevicelink.proxy.rpc.OnAppInterfaceUnregistered;
import com.smartdevicelink.proxy.rpc.OnAudioPassThru;
import com.smartdevicelink.proxy.rpc.OnButtonEvent;
import com.smartdevicelink.proxy.rpc.OnButtonPress;
import com.smartdevicelink.proxy.rpc.OnCommand;
import com.smartdevicelink.proxy.rpc.OnDriverDistraction;
import com.smartdevicelink.proxy.rpc.OnHMIStatus;
import com.smartdevicelink.proxy.rpc.OnHashChange;
import com.smartdevicelink.proxy.rpc.OnKeyboardInput;
import com.smartdevicelink.proxy.rpc.OnLanguageChange;
import com.smartdevicelink.proxy.rpc.OnPermissionsChange;
import com.smartdevicelink.proxy.rpc.OnSystemRequest;
import com.smartdevicelink.proxy.rpc.OnTBTClientState;
import com.smartdevicelink.proxy.rpc.OnTouchEvent;
import com.smartdevicelink.proxy.rpc.OnVehicleData;
import com.smartdevicelink.proxy.rpc.PerformAudioPassThru;
import com.smartdevicelink.proxy.rpc.PerformAudioPassThruResponse;
import com.smartdevicelink.proxy.rpc.PerformInteraction;
import com.smartdevicelink.proxy.rpc.PerformInteractionResponse;
import com.smartdevicelink.proxy.rpc.PresetBankCapabilities;
import com.smartdevicelink.proxy.rpc.PutFile;
import com.smartdevicelink.proxy.rpc.PutFileResponse;
import com.smartdevicelink.proxy.rpc.ReadDIDResponse;
import com.smartdevicelink.proxy.rpc.RegisterAppInterface;
import com.smartdevicelink.proxy.rpc.RegisterAppInterfaceResponse;
import com.smartdevicelink.proxy.rpc.ResetGlobalProperties;
import com.smartdevicelink.proxy.rpc.ResetGlobalPropertiesResponse;
import com.smartdevicelink.proxy.rpc.ScrollableMessage;
import com.smartdevicelink.proxy.rpc.ScrollableMessageResponse;
import com.smartdevicelink.proxy.rpc.SdlMsgVersion;
import com.smartdevicelink.proxy.rpc.SetAppIcon;
import com.smartdevicelink.proxy.rpc.SetAppIconResponse;
import com.smartdevicelink.proxy.rpc.SetDisplayLayout;
import com.smartdevicelink.proxy.rpc.SetDisplayLayoutResponse;
import com.smartdevicelink.proxy.rpc.SetGlobalProperties;
import com.smartdevicelink.proxy.rpc.SetGlobalPropertiesResponse;
import com.smartdevicelink.proxy.rpc.SetMediaClockTimer;
import com.smartdevicelink.proxy.rpc.SetMediaClockTimerResponse;
import com.smartdevicelink.proxy.rpc.Show;
import com.smartdevicelink.proxy.rpc.ShowResponse;
import com.smartdevicelink.proxy.rpc.Slider;
import com.smartdevicelink.proxy.rpc.SliderResponse;
import com.smartdevicelink.proxy.rpc.SoftButton;
import com.smartdevicelink.proxy.rpc.SoftButtonCapabilities;
import com.smartdevicelink.proxy.rpc.Speak;
import com.smartdevicelink.proxy.rpc.SpeakResponse;
import com.smartdevicelink.proxy.rpc.SubscribeButton;
import com.smartdevicelink.proxy.rpc.SubscribeButtonResponse;
import com.smartdevicelink.proxy.rpc.SubscribeVehicleData;
import com.smartdevicelink.proxy.rpc.SubscribeVehicleDataResponse;
import com.smartdevicelink.proxy.rpc.SystemRequest;
import com.smartdevicelink.proxy.rpc.SystemRequestResponse;
import com.smartdevicelink.proxy.rpc.TTSChunk;
import com.smartdevicelink.proxy.rpc.UnregisterAppInterface;
import com.smartdevicelink.proxy.rpc.UnregisterAppInterfaceResponse;
import com.smartdevicelink.proxy.rpc.UnsubscribeButton;
import com.smartdevicelink.proxy.rpc.UnsubscribeButtonResponse;
import com.smartdevicelink.proxy.rpc.UnsubscribeVehicleData;
import com.smartdevicelink.proxy.rpc.UnsubscribeVehicleDataResponse;
import com.smartdevicelink.proxy.rpc.VehicleType;
import com.smartdevicelink.proxy.rpc.VrHelpItem;
import com.smartdevicelink.proxy.rpc.enums.AppHMIType;
import com.smartdevicelink.proxy.rpc.enums.AudioStreamingState;
import com.smartdevicelink.proxy.rpc.enums.AudioType;
import com.smartdevicelink.proxy.rpc.enums.BitsPerSample;
import com.smartdevicelink.proxy.rpc.enums.ButtonName;
import com.smartdevicelink.proxy.rpc.enums.DriverDistractionState;
import com.smartdevicelink.proxy.rpc.enums.FileType;
import com.smartdevicelink.proxy.rpc.enums.GlobalProperty;
import com.smartdevicelink.proxy.rpc.enums.HMILevel;
import com.smartdevicelink.proxy.rpc.enums.HmiZoneCapabilities;
import com.smartdevicelink.proxy.rpc.enums.ImageType;
import com.smartdevicelink.proxy.rpc.enums.InteractionMode;
import com.smartdevicelink.proxy.rpc.enums.Language;
import com.smartdevicelink.proxy.rpc.enums.PrerecordedSpeech;
import com.smartdevicelink.proxy.rpc.enums.RequestType;
import com.smartdevicelink.proxy.rpc.enums.Result;
import com.smartdevicelink.proxy.rpc.enums.SamplingRate;
import com.smartdevicelink.proxy.rpc.enums.SdlConnectionState;
import com.smartdevicelink.proxy.rpc.enums.SdlDisconnectedReason;
import com.smartdevicelink.proxy.rpc.enums.SdlInterfaceAvailability;
import com.smartdevicelink.proxy.rpc.enums.SpeechCapabilities;
import com.smartdevicelink.proxy.rpc.enums.SystemContext;
import com.smartdevicelink.proxy.rpc.enums.TextAlignment;
import com.smartdevicelink.proxy.rpc.enums.UpdateMode;
import com.smartdevicelink.proxy.rpc.enums.VrCapabilities;
import com.smartdevicelink.trace.SdlTrace;
import com.smartdevicelink.trace.TraceDeviceInfo;
import com.smartdevicelink.trace.enums.InterfaceActivityDirection;
import com.smartdevicelink.transport.BaseTransportConfig;
import com.smartdevicelink.transport.SiphonServer;
import com.smartdevicelink.transport.TransportType;
import com.smartdevicelink.util.DebugTool;
public abstract class SdlProxyBase<proxyListenerType extends IProxyListenerBase> {
// Used for calls to Android Log class.
public static final String TAG = "SdlProxy";
private static final String SDL_LIB_TRACE_KEY = "42baba60-eb57-11df-98cf-0800200c9a66";
private static final int PROX_PROT_VER_ONE = 1;
private SdlSession sdlSession = null;
private proxyListenerType _proxyListener = null;
protected Service _appService = null;
private String sPoliciesURL = ""; //for testing only
// Protected Correlation IDs
private final int REGISTER_APP_INTERFACE_CORRELATION_ID = 65529,
UNREGISTER_APP_INTERFACE_CORRELATION_ID = 65530,
POLICIES_CORRELATION_ID = 65535;
// Synchronization Objects
private static final Object CONNECTION_REFERENCE_LOCK = new Object(),
INCOMING_MESSAGE_QUEUE_THREAD_LOCK = new Object(),
OUTGOING_MESSAGE_QUEUE_THREAD_LOCK = new Object(),
INTERNAL_MESSAGE_QUEUE_THREAD_LOCK = new Object(),
ON_UPDATE_LISTENER_LOCK = new Object();
private Object APP_INTERFACE_REGISTERED_LOCK = new Object();
private int iFileCount = 0;
private boolean navServiceResponseReceived = false;
private boolean navServiceResponse = false;
@SuppressWarnings("unused")
private boolean pcmServiceResponseReceived = false;
@SuppressWarnings("unused")
private boolean pcmServiceResponse = false;
// Device Info for logging
private TraceDeviceInfo _traceDeviceInterrogator = null;
// Declare Queuing Threads
private ProxyMessageDispatcher<ProtocolMessage> _incomingProxyMessageDispatcher;
private ProxyMessageDispatcher<ProtocolMessage> _outgoingProxyMessageDispatcher;
private ProxyMessageDispatcher<InternalProxyMessage> _internalProxyMessageDispatcher;
// Flag indicating if callbacks should be called from UIThread
private Boolean _callbackToUIThread = false;
// UI Handler
private Handler _mainUIHandler = null;
final int HEARTBEAT_CORRELATION_ID = 65531;
// SdlProxy Advanced Lifecycle Management
protected Boolean _advancedLifecycleManagementEnabled = false;
// Parameters passed to the constructor from the app to register an app interface
private String _applicationName = null;
private long instanceDateTime = System.currentTimeMillis();
private String sConnectionDetails = "N/A";
private Vector<TTSChunk> _ttsName = null;
private String _ngnMediaScreenAppName = null;
private Boolean _isMediaApp = null;
private Language _sdlLanguageDesired = null;
private Language _hmiDisplayLanguageDesired = null;
private Vector<AppHMIType> _appType = null;
private String _appID = null;
private String _autoActivateIdDesired = null;
private String _lastHashID = null;
private SdlMsgVersion _sdlMsgVersionRequest = null;
private Vector<String> _vrSynonyms = null;
private boolean _bAppResumeEnabled = false;
/**
* Contains current configuration for the transport that was selected during
* construction of this object
*/
private BaseTransportConfig _transportConfig = null;
// Proxy State Variables
protected Boolean _appInterfaceRegisterd = false;
protected Boolean _preRegisterd = false;
@SuppressWarnings("unused")
private Boolean _haveReceivedFirstNonNoneHMILevel = false;
protected Boolean _haveReceivedFirstFocusLevel = false;
protected Boolean _haveReceivedFirstFocusLevelFull = false;
protected Boolean _proxyDisposed = false;
protected SdlConnectionState _sdlConnectionState = null;
protected SdlInterfaceAvailability _sdlIntefaceAvailablity = null;
protected HMILevel _hmiLevel = null;
private HMILevel _priorHmiLevel = null;
protected AudioStreamingState _audioStreamingState = null;
private AudioStreamingState _priorAudioStreamingState = null;
protected SystemContext _systemContext = null;
// Variables set by RegisterAppInterfaceResponse
protected SdlMsgVersion _sdlMsgVersion = null;
protected String _autoActivateIdReturned = null;
protected Language _sdlLanguage = null;
protected Language _hmiDisplayLanguage = null;
protected DisplayCapabilities _displayCapabilities = null;
protected List<ButtonCapabilities> _buttonCapabilities = null;
protected List<SoftButtonCapabilities> _softButtonCapabilities = null;
protected PresetBankCapabilities _presetBankCapabilities = null;
protected List<HmiZoneCapabilities> _hmiZoneCapabilities = null;
protected List<SpeechCapabilities> _speechCapabilities = null;
protected List<PrerecordedSpeech> _prerecordedSpeech = null;
protected List<VrCapabilities> _vrCapabilities = null;
protected VehicleType _vehicleType = null;
protected List<AudioPassThruCapabilities> _audioPassThruCapabilities = null;
protected List<Integer> _diagModes = null;
protected Boolean firstTimeFull = true;
protected String _proxyVersionInfo = null;
protected Boolean _bResumeSuccess = false;
protected byte _wiproVersion = 1;
protected SparseArray<OnUpdateListener> rpcUpdateListeners = null;
// Interface broker
private SdlInterfaceBroker _interfaceBroker = null;
// Private Class to Interface with SdlConnection
private class SdlInterfaceBroker implements ISdlConnectionListener {
@Override
public void onTransportDisconnected(String info) {
// onTransportDisconnected is called to alert the proxy that a requested
// disconnect has completed
if (_advancedLifecycleManagementEnabled) {
// If ALM, nothing is required to be done here
} else {
// If original model, notify the app that the proxy is closed so it can delete and reinstantiate it
notifyProxyClosed(info, new SdlException("Transport disconnected.", SdlExceptionCause.SDL_UNAVAILABLE), SdlDisconnectedReason.TRANSPORT_DISCONNECT);
}
}
@Override
public void onTransportError(String info, Exception e) {
DebugTool.logError("Transport failure: " + info, e);
if (_advancedLifecycleManagementEnabled) {
// Cycle the proxy
cycleProxy(SdlDisconnectedReason.TRANSPORT_ERROR);
} else {
notifyProxyClosed(info, e, SdlDisconnectedReason.TRANSPORT_ERROR);
}
}
@Override
public void onProtocolMessageReceived(ProtocolMessage msg) {
// AudioPassThru data arrives WITH BulkData but WITHOUT JSON data
// A policy snapshot arrives WITH BulkData and WITH JSON data
if ((msg.getData() != null && msg.getData().length > 0) ||
(msg.getBulkData() != null && msg.getBulkData().length > 0)) {
queueIncomingMessage(msg);
}
}
@Override
public void onProtocolSessionStarted(SessionType sessionType,
byte sessionID, byte version, String correlationID) {
Intent sendIntent = createBroadcastIntent();
updateBroadcastIntent(sendIntent, "FUNCTION_NAME", "onProtocolSessionStarted");
updateBroadcastIntent(sendIntent, "COMMENT1", "SessionID: " + sessionID);
updateBroadcastIntent(sendIntent, "COMMENT2", " SessionType: " + sessionType.getName());
sendBroadcastIntent(sendIntent);
setWiProVersion(version);
if ( (_transportConfig.getHeartBeatTimeout() != Integer.MAX_VALUE) && (version > 2) )
{
HeartbeatMonitor heartbeatMonitor = new HeartbeatMonitor();
heartbeatMonitor.setInterval(_transportConfig.getHeartBeatTimeout());
sdlSession.setHeartbeatMonitor(heartbeatMonitor);
}
if (sessionType.eq(SessionType.RPC)) {
startRPCProtocolSession(sessionID, correlationID);
} else if (sessionType.eq(SessionType.NAV)) {
NavServiceStarted();
} else if (_wiproVersion > 1) {
//If the protocol version is 2 or above, there is no need to specify a session type
startRPCProtocolSession(sessionID, correlationID);
} else {
// Handle other protocol session types here
}
}
@Override
public void onProtocolSessionNACKed(SessionType sessionType,
byte sessionID, byte version, String correlationID) {
if (sessionType.eq(SessionType.NAV)) {
NavServiceEnded();
}
}
@Override
public void onProtocolSessionEnded(SessionType sessionType,
byte sessionID, String correlationID) {
// How to handle protocol session ended?
// How should protocol session management occur?
}
@Override
public void onProtocolError(String info, Exception e) {
passErrorToProxyListener(info, e);
}
@Override
public void onHeartbeatTimedOut(byte sessionID) {
final String msg = "Heartbeat timeout";
DebugTool.logInfo(msg);
Intent sendIntent = createBroadcastIntent();
updateBroadcastIntent(sendIntent, "FUNCTION_NAME", "onHeartbeatTimedOut");
updateBroadcastIntent(sendIntent, "COMMENT1", "Heartbeat timeout for SessionID: " + sessionID);
sendBroadcastIntent(sendIntent);
notifyProxyClosed(msg, new SdlException(msg, SdlExceptionCause.HEARTBEAT_PAST_DUE), SdlDisconnectedReason.HB_TIMEOUT);
}
}
/**
* Constructor.
*
* @param listener Listener that will receive callbacks from this proxy.
* @param sdlProxyConfigurationResources Configuration resources for this proxy.
* @param enableAdvancedLifecycleManagement Flag indicating whether Advanced Lifecycle Management (ALM) should be enabled.
* @param appName Client application name.
* @param ttsName TTS name.
* @param ngnMediaScreenAppName Media Screen Application name.
* @param vrSynonyms List of synonyms.
* @param isMediaApp Flag indicating whether the client application is a media application.
* @param sdlMsgVersion Version of Sdl Message.
* @param languageDesired Desired language.
* @param hmiDisplayLanguageDesired Desired language for HMI.
* @param appType Type of application.
* @param appID Application identifier.
* @param autoActivateID Auto activation identifier.
* @param callbackToUIThread Flag indicating whether this proxy should deliver callbacks on the UI thread.
* @param transportConfig Configuration of transport to be used by underlying connection.
* @throws SdlException
*/
protected SdlProxyBase(proxyListenerType listener, SdlProxyConfigurationResources sdlProxyConfigurationResources,
boolean enableAdvancedLifecycleManagement, String appName, Vector<TTSChunk> ttsName,
String ngnMediaScreenAppName, Vector<String> vrSynonyms, Boolean isMediaApp, SdlMsgVersion sdlMsgVersion,
Language languageDesired, Language hmiDisplayLanguageDesired, Vector<AppHMIType> appType, String appID,
String autoActivateID, boolean callbackToUIThread, BaseTransportConfig transportConfig)
throws SdlException {
performBaseCommon(listener, sdlProxyConfigurationResources, enableAdvancedLifecycleManagement, appName, ttsName, ngnMediaScreenAppName, vrSynonyms, isMediaApp,
sdlMsgVersion, languageDesired, hmiDisplayLanguageDesired, appType, appID, autoActivateID, callbackToUIThread, null, null, null, transportConfig);
}
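// Illustrative sketch (not part of the library): a concrete proxy class would typically extend
// SdlProxyBase and forward its arguments to the protected constructor above. The class name and
// the reduced parameter list below are hypothetical; only the super(...) argument order mirrors
// the actual constructor signature.
//
//   public class MySdlProxy extends SdlProxyBase<IProxyListenerBase> {
//       public MySdlProxy(IProxyListenerBase listener, String appName, Boolean isMediaApp,
//                         String appID, BaseTransportConfig transport) throws SdlException {
//           super(listener, null, true, appName, null, null, null, isMediaApp,
//                 null, null, null, null, appID, null, false, transport);
//       }
//   }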
private void performBaseCommon(proxyListenerType listener, SdlProxyConfigurationResources sdlProxyConfigurationResources,
boolean enableAdvancedLifecycleManagement, String appName, Vector<TTSChunk> ttsName,
String ngnMediaScreenAppName, Vector<String> vrSynonyms, Boolean isMediaApp, SdlMsgVersion sdlMsgVersion,
Language languageDesired, Language hmiDisplayLanguageDesired, Vector<AppHMIType> appType, String appID,
String autoActivateID, boolean callbackToUIThread, Boolean preRegister, String sHashID, Boolean bAppResumeEnab,
BaseTransportConfig transportConfig) throws SdlException
{
setWiProVersion((byte)PROX_PROT_VER_ONE);
if (preRegister != null && preRegister)
{
_appInterfaceRegisterd = preRegister;
_preRegisterd = preRegister;
}
if (bAppResumeEnab != null && bAppResumeEnab)
{
_bAppResumeEnabled = true;
_lastHashID = sHashID;
}
_interfaceBroker = new SdlInterfaceBroker();
_callbackToUIThread = callbackToUIThread;
if (_callbackToUIThread) {
_mainUIHandler = new Handler(Looper.getMainLooper());
}
// Set variables for Advanced Lifecycle Management
_advancedLifecycleManagementEnabled = enableAdvancedLifecycleManagement;
_applicationName = appName;
_ttsName = ttsName;
_ngnMediaScreenAppName = ngnMediaScreenAppName;
_isMediaApp = isMediaApp;
_sdlMsgVersionRequest = sdlMsgVersion;
_vrSynonyms = vrSynonyms;
_sdlLanguageDesired = languageDesired;
_hmiDisplayLanguageDesired = hmiDisplayLanguageDesired;
_appType = appType;
_appID = appID;
_autoActivateIdDesired = autoActivateID;
_transportConfig = transportConfig;
// Test conditions to invalidate the proxy
if (listener == null) {
throw new IllegalArgumentException("IProxyListener listener must be provided to instantiate SdlProxy object.");
}
if (_advancedLifecycleManagementEnabled) {
if (_isMediaApp == null) {
throw new IllegalArgumentException("isMediaApp must not be null when using SdlProxyALM.");
}
}
_proxyListener = listener;
// Get information from sdlProxyConfigurationResources
TelephonyManager telephonyManager = null;
if (sdlProxyConfigurationResources != null) {
telephonyManager = sdlProxyConfigurationResources.getTelephonyManager();
}
// Use the telephonyManager to get and log phone info
if (telephonyManager != null) {
// The following check is not strictly thread-safe (two threads could both observe
// _traceDeviceInterrogator as null and create two instances), but the consequence of a
// duplicate instance is likely harmless.
if (_traceDeviceInterrogator == null) {
_traceDeviceInterrogator = new TraceDeviceInfo(sdlProxyConfigurationResources.getTelephonyManager());
} // end-if
} // end-if
// Setup Internal ProxyMessage Dispatcher
synchronized(INTERNAL_MESSAGE_QUEUE_THREAD_LOCK) {
// Dispose of any existing internal dispatcher before creating a new one
if (_internalProxyMessageDispatcher != null) {
_internalProxyMessageDispatcher.dispose();
_internalProxyMessageDispatcher = null;
}
_internalProxyMessageDispatcher = new ProxyMessageDispatcher<InternalProxyMessage>("INTERNAL_MESSAGE_DISPATCHER",
new InternalProxyMessageComparitor(),
new IDispatchingStrategy<InternalProxyMessage>() {
@Override
public void dispatch(InternalProxyMessage message) {
dispatchInternalMessage((InternalProxyMessage)message);
}
@Override
public void handleDispatchingError(String info, Exception ex) {
handleErrorsFromInternalMessageDispatcher(info, ex);
}
@Override
public void handleQueueingError(String info, Exception ex) {
handleErrorsFromInternalMessageDispatcher(info, ex);
}
});
}
// Setup Incoming ProxyMessage Dispatcher
synchronized(INCOMING_MESSAGE_QUEUE_THREAD_LOCK) {
// Dispose of any existing incoming dispatcher before creating a new one
if (_incomingProxyMessageDispatcher != null) {
_incomingProxyMessageDispatcher.dispose();
_incomingProxyMessageDispatcher = null;
}
_incomingProxyMessageDispatcher = new ProxyMessageDispatcher<ProtocolMessage>("INCOMING_MESSAGE_DISPATCHER",
new IncomingProtocolMessageComparitor(),
new IDispatchingStrategy<ProtocolMessage>() {
@Override
public void dispatch(ProtocolMessage message) {
dispatchIncomingMessage((ProtocolMessage)message);
}
@Override
public void handleDispatchingError(String info, Exception ex) {
handleErrorsFromIncomingMessageDispatcher(info, ex);
}
@Override
public void handleQueueingError(String info, Exception ex) {
handleErrorsFromIncomingMessageDispatcher(info, ex);
}
});
}
// Setup Outgoing ProxyMessage Dispatcher
synchronized(OUTGOING_MESSAGE_QUEUE_THREAD_LOCK) {
// Dispose of any existing outgoing dispatcher before creating a new one
if (_outgoingProxyMessageDispatcher != null) {
_outgoingProxyMessageDispatcher.dispose();
_outgoingProxyMessageDispatcher = null;
}
_outgoingProxyMessageDispatcher = new ProxyMessageDispatcher<ProtocolMessage>("OUTGOING_MESSAGE_DISPATCHER",
new OutgoingProtocolMessageComparitor(),
new IDispatchingStrategy<ProtocolMessage>() {
@Override
public void dispatch(ProtocolMessage message) {
dispatchOutgoingMessage((ProtocolMessage)message);
}
@Override
public void handleDispatchingError(String info, Exception ex) {
handleErrorsFromOutgoingMessageDispatcher(info, ex);
}
@Override
public void handleQueueingError(String info, Exception ex) {
handleErrorsFromOutgoingMessageDispatcher(info, ex);
}
});
}
rpcUpdateListeners = new SparseArray<OnUpdateListener>();
// Initialize the proxy
try {
initializeProxy();
} catch (SdlException e) {
// Couldn't initialize the proxy
// Dispose threads and then rethrow exception
if (_internalProxyMessageDispatcher != null) {
_internalProxyMessageDispatcher.dispose();
_internalProxyMessageDispatcher = null;
}
if (_incomingProxyMessageDispatcher != null) {
_incomingProxyMessageDispatcher.dispose();
_incomingProxyMessageDispatcher = null;
}
if (_outgoingProxyMessageDispatcher != null) {
_outgoingProxyMessageDispatcher.dispose();
_outgoingProxyMessageDispatcher = null;
}
throw e;
}
// Trace that ctor has fired
SdlTrace.logProxyEvent("SdlProxy Created, instanceID=" + this.toString(), SDL_LIB_TRACE_KEY);
}
protected SdlProxyBase(proxyListenerType listener, SdlProxyConfigurationResources sdlProxyConfigurationResources,
boolean enableAdvancedLifecycleManagement, String appName, Vector<TTSChunk> ttsName,
String ngnMediaScreenAppName, Vector<String> vrSynonyms, Boolean isMediaApp, SdlMsgVersion sdlMsgVersion,
Language languageDesired, Language hmiDisplayLanguageDesired, Vector<AppHMIType> appType, String appID,
String autoActivateID, boolean callbackToUIThread, boolean preRegister, String sHashID, Boolean bEnableResume, BaseTransportConfig transportConfig)
throws SdlException
{
performBaseCommon(listener, sdlProxyConfigurationResources, enableAdvancedLifecycleManagement, appName, ttsName, ngnMediaScreenAppName, vrSynonyms, isMediaApp,
sdlMsgVersion, languageDesired, hmiDisplayLanguageDesired, appType, appID, autoActivateID, callbackToUIThread, preRegister, sHashID, bEnableResume, transportConfig);
}
/**
* Constructor.
*
* @param listener Listener that will receive callbacks from this proxy.
* @param sdlProxyConfigurationResources Configuration resources for this proxy.
* @param enableAdvancedLifecycleManagement Flag indicating whether Advanced Lifecycle Management (ALM) should be enabled.
* @param appName Client application name.
* @param ttsName TTS name.
* @param ngnMediaScreenAppName Media Screen Application name.
* @param vrSynonyms List of synonyms.
* @param isMediaApp Flag indicating whether the client application is a media application.
* @param sdlMsgVersion Version of Sdl Message.
* @param languageDesired Desired language.
* @param hmiDisplayLanguageDesired Desired language for HMI.
* @param appType Type of application.
* @param appID Application identifier.
* @param autoActivateID Auto activation identifier.
* @param callbackToUIThread Flag indicating whether this proxy should deliver callbacks on the UI thread.
* @param preRegister Flag indicating whether this proxy should be treated as pre-registered.
* @param transportConfig Configuration of transport to be used by underlying connection.
* @throws SdlException
*/
protected SdlProxyBase(proxyListenerType listener, SdlProxyConfigurationResources sdlProxyConfigurationResources,
boolean enableAdvancedLifecycleManagement, String appName, Vector<TTSChunk> ttsName,
String ngnMediaScreenAppName, Vector<String> vrSynonyms, Boolean isMediaApp, SdlMsgVersion sdlMsgVersion,
Language languageDesired, Language hmiDisplayLanguageDesired, Vector<AppHMIType> appType, String appID,
String autoActivateID, boolean callbackToUIThread, boolean preRegister, BaseTransportConfig transportConfig)
throws SdlException
{
performBaseCommon(listener, sdlProxyConfigurationResources, enableAdvancedLifecycleManagement, appName, ttsName, ngnMediaScreenAppName, vrSynonyms, isMediaApp,
sdlMsgVersion, languageDesired, hmiDisplayLanguageDesired, appType, appID, autoActivateID, callbackToUIThread, preRegister, null, null, transportConfig);
}
private Intent createBroadcastIntent()
{
Intent sendIntent = new Intent();
sendIntent.setAction("com.smartdevicelink.broadcast");
sendIntent.putExtra("APP_NAME", this._applicationName);
sendIntent.putExtra("APP_ID", this._appID);
sendIntent.putExtra("RPC_NAME", "");
sendIntent.putExtra("TYPE", "");
sendIntent.putExtra("SUCCESS", true);
sendIntent.putExtra("CORRID", 0);
sendIntent.putExtra("FUNCTION_NAME", "");
sendIntent.putExtra("COMMENT1", "");
sendIntent.putExtra("COMMENT2", "");
sendIntent.putExtra("COMMENT3", "");
sendIntent.putExtra("COMMENT4", "");
sendIntent.putExtra("COMMENT5", "");
sendIntent.putExtra("COMMENT6", "");
sendIntent.putExtra("COMMENT7", "");
sendIntent.putExtra("COMMENT8", "");
sendIntent.putExtra("COMMENT9", "");
sendIntent.putExtra("COMMENT10", "");
sendIntent.putExtra("DATA", "");
sendIntent.putExtra("SHOW_ON_UI", true);
return sendIntent;
}
private void updateBroadcastIntent(Intent sendIntent, String sKey, String sValue)
{
if (sValue == null) sValue = "";
sendIntent.putExtra(sKey, sValue);
}
private void updateBroadcastIntent(Intent sendIntent, String sKey, boolean bValue)
{
sendIntent.putExtra(sKey, bValue);
}
private void updateBroadcastIntent(Intent sendIntent, String sKey, int iValue)
{
sendIntent.putExtra(sKey, iValue);
}
private void sendBroadcastIntent(Intent sendIntent)
{
Service myService = null;
if (_proxyListener != null && _proxyListener instanceof Service)
{
myService = (Service) _proxyListener;
}
else if (_appService != null)
{
myService = _appService;
}
else
{
return;
}
try
{
Context myContext = myService.getApplicationContext();
if (myContext != null) myContext.sendBroadcast(sendIntent);
}
catch(Exception ex)
{
//If the service or context has become unavailable unexpectedly, catch the exception and move on -- no broadcast log will occur.
}
}
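// Illustrative sketch (not part of this class): an application that wants to observe the
// diagnostic broadcasts emitted above can register a receiver for the
// "com.smartdevicelink.broadcast" action and read the extras populated by
// createBroadcastIntent()/updateBroadcastIntent(). The receiver and log tag names are hypothetical.
//
//   BroadcastReceiver sdlDebugReceiver = new BroadcastReceiver() {
//       @Override
//       public void onReceive(Context context, Intent intent) {
//           String function = intent.getStringExtra("FUNCTION_NAME");
//           String comment = intent.getStringExtra("COMMENT1");
//           Log.d("SdlDebugBroadcast", function + ": " + comment);
//       }
//   };
//   context.registerReceiver(sdlDebugReceiver, new IntentFilter("com.smartdevicelink.broadcast"));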
private void writeToFile(Object writeME, String fileName) {
Intent sendIntent = createBroadcastIntent();
try {
updateBroadcastIntent(sendIntent,"FUNCTION_NAME", "writeToFile");
updateBroadcastIntent(sendIntent, "SHOW_ON_UI", false);
String sFileName = fileName + "_" + iFileCount + ".txt";
String outFile = Environment.getExternalStorageDirectory().getPath() + "/" + sFileName;
File out = new File(outFile);
FileWriter writer = new FileWriter(out);
writer.write(writeME.toString());
writer.flush();
writer.close();
updateBroadcastIntent(sendIntent, "COMMENT1", outFile);
} catch (FileNotFoundException e) {
updateBroadcastIntent(sendIntent, "COMMENT2", "writeToFile FileNotFoundException " + e);
Log.i("sdlp", "FileNotFoundException: " + e);
e.printStackTrace();
} catch (IOException e) {
updateBroadcastIntent(sendIntent, "COMMENT2", "writeToFile IOException " + e);
Log.i("sdlp", "IOException: " + e);
e.printStackTrace();
}
finally
{
sendBroadcastIntent(sendIntent);
}
}
private void LogHeader(String sType, final String myObject, String sFuncName)
{
Intent sendIntent = createBroadcastIntent();
updateBroadcastIntent(sendIntent, "FUNCTION_NAME", sFuncName);
updateBroadcastIntent(sendIntent, "COMMENT1", sType + "\r\n");
updateBroadcastIntent(sendIntent, "DATA", myObject);
sendBroadcastIntent(sendIntent);
}
private HttpURLConnection getURLConnection(Headers myHeader, String sURLString, int Timeout, int iContentLen)
{
String sContentType = "application/json";
int CONNECTION_TIMEOUT = Timeout * 1000;
int READ_TIMEOUT = Timeout * 1000;
boolean bDoOutput = true;
boolean bDoInput = true;
boolean bUsesCaches = false;
String sRequestMeth = "POST";
boolean bInstFolRed = false;
String sCharSet = "utf-8";
int iContentLength = iContentLen;
URL url = null;
HttpURLConnection urlConnection = null;
Intent sendIntent = createBroadcastIntent();
updateBroadcastIntent(sendIntent, "FUNCTION_NAME", "getURLConnection");
updateBroadcastIntent(sendIntent, "COMMENT1", "Actual Content Length: " + iContentLen);
if (myHeader != null)
{
//if the header isn't null, use it and replace the hardcoded params
int iTimeout;
int iReadTimeout;
sContentType = myHeader.getContentType();
iTimeout = myHeader.getConnectTimeout();
bDoOutput = myHeader.getDoOutput();
bDoInput = myHeader.getDoInput();
bUsesCaches = myHeader.getUseCaches();
sRequestMeth = myHeader.getRequestMethod();
iReadTimeout = myHeader.getReadTimeout();
bInstFolRed = myHeader.getInstanceFollowRedirects();
sCharSet = myHeader.getCharset();
iContentLength = myHeader.getContentLength();
CONNECTION_TIMEOUT = iTimeout*1000;
READ_TIMEOUT = iReadTimeout*1000;
updateBroadcastIntent(sendIntent, "COMMENT2", "\nHeader Defined Content Length: " + iContentLength);
}
try
{
url = new URL(sURLString);
urlConnection = (HttpURLConnection) url.openConnection();
urlConnection.setConnectTimeout(CONNECTION_TIMEOUT);
urlConnection.setDoOutput(bDoOutput);
urlConnection.setDoInput(bDoInput);
urlConnection.setRequestMethod(sRequestMeth);
urlConnection.setReadTimeout(READ_TIMEOUT);
urlConnection.setInstanceFollowRedirects(bInstFolRed);
urlConnection.setRequestProperty("Content-Type", sContentType);
urlConnection.setRequestProperty("charset", sCharSet);
urlConnection.setRequestProperty("Content-Length", "" + Integer.toString(iContentLength));
urlConnection.setUseCaches(bUsesCaches);
return urlConnection;
}
catch (Exception e)
{
return null;
}
finally
{
sendBroadcastIntent(sendIntent);
}
}
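// For reference: when no Headers object accompanies the request, getURLConnection() above falls
// back to a POST with Content-Type "application/json", charset "utf-8", caching and redirects
// disabled, and connect/read timeouts of (Timeout * 1000) ms. A hypothetical call with a
// 7-second timeout and a 128-byte body would therefore be configured as follows:
//
//   HttpURLConnection conn = getURLConnection(null, "https://policies.example.com", 7, 128);
//   // conn.getConnectTimeout() == 7000, conn.getReadTimeout() == 7000,
//   // "POST".equals(conn.getRequestMethod())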
private void sendOnSystemRequestToUrl(OnSystemRequest msg)
{
Intent sendIntent = createBroadcastIntent();
Intent sendIntent2 = createBroadcastIntent();
HttpURLConnection urlConnection = null;
boolean bLegacy = false;
String sURLString;
if (!getPoliciesURL().equals(""))
sURLString = sPoliciesURL;
else
sURLString = msg.getUrl();
Integer iTimeout = msg.getTimeout();
if (iTimeout == null)
iTimeout = 2000;
Headers myHeader = msg.getHeader();
String sFunctionName = "SYSTEM_REQUEST";
updateBroadcastIntent(sendIntent, "FUNCTION_NAME", "sendOnSystemRequestToUrl");
updateBroadcastIntent(sendIntent, "COMMENT5", "\r\nCloud URL: " + sURLString);
try
{
if (myHeader == null)
updateBroadcastIntent(sendIntent, "COMMENT7", "\r\nHTTPRequest Header is null");
String sBodyString = msg.getBody();
JSONObject jsonObjectToSendToServer;
String valid_json;
if (sBodyString == null)
{
List<String> legacyData = msg.getLegacyData();
JSONArray jsonArrayOfSdlPPackets = new JSONArray(legacyData);
jsonObjectToSendToServer = new JSONObject();
jsonObjectToSendToServer.put("data", jsonArrayOfSdlPPackets);
bLegacy = true;
sFunctionName = "SYSTEM_REQUEST_LEGACY";
updateBroadcastIntent(sendIntent, "COMMENT6", "\r\nLegacy SystemRequest: true");
valid_json = jsonObjectToSendToServer.toString().replace("\\", "");
}
else
{
Intent sendIntent3 = createBroadcastIntent();
updateBroadcastIntent(sendIntent3, "FUNCTION_NAME", "replace");
updateBroadcastIntent(sendIntent3, "COMMENT1", "Valid Json length before replace: " + sBodyString.getBytes("UTF-8").length);
sendBroadcastIntent(sendIntent3);
valid_json = sBodyString.replace("\\", "");
}
writeToFile(valid_json, "requestToCloud");
LogHeader("Cloud Request", valid_json, sFunctionName);
urlConnection = getURLConnection(myHeader, sURLString, iTimeout, valid_json.getBytes("UTF-8").length);
if (urlConnection == null)
{
Log.i(TAG, "urlConnection is null, check RPC input parameters");
updateBroadcastIntent(sendIntent, "COMMENT2", "urlConnection is null, check RPC input parameters");
return;
}
DataOutputStream wr = new DataOutputStream(urlConnection.getOutputStream());
wr.writeBytes(valid_json);
wr.flush();
wr.close();
long BeforeTime = System.currentTimeMillis();
@SuppressWarnings("unused")
String sResponseMsg = urlConnection.getResponseMessage();
long AfterTime = System.currentTimeMillis();
final long roundtriptime = AfterTime - BeforeTime;
updateBroadcastIntent(sendIntent, "COMMENT4", " Round trip time: " + roundtriptime);
updateBroadcastIntent(sendIntent, "COMMENT1", "Received response from cloud, response code=" + urlConnection.getResponseCode() + " ");
int iResponseCode = urlConnection.getResponseCode();
if (iResponseCode != HttpURLConnection.HTTP_OK)
{
Log.i(TAG, "Response code not HTTP_OK, returning from sendOnSystemRequestToUrl.");
updateBroadcastIntent(sendIntent, "COMMENT2", "Response code not HTTP_OK, aborting request. ");
return;
}
InputStream is = urlConnection.getInputStream();
BufferedReader rd = new BufferedReader(new InputStreamReader(is));
String line;
StringBuffer response = new StringBuffer();
while((line = rd.readLine()) != null)
{
response.append(line);
response.append('\r');
}
rd.close();
Log.i(TAG, "response: " + response.toString());
writeToFile(response.toString(), "responseFromCloud");
LogHeader("Cloud Response", response.toString(), sFunctionName);
Vector<String> cloudDataReceived = new Vector<String>();
// Convert the response to JSON
JSONObject jsonResponse = new JSONObject(response.toString());
if (jsonResponse.get("data") instanceof JSONArray)
{
JSONArray jsonArray = jsonResponse.getJSONArray("data");
for (int i=0; i<jsonArray.length(); i++)
{
if (jsonArray.get(i) instanceof String)
{
cloudDataReceived.add(jsonArray.getString(i));
//Log.i("sendOnSystemRequestToUrl", "jsonArray.getString(i): " + jsonArray.getString(i));
}
}
}
else if (jsonResponse.get("data") instanceof String)
{
cloudDataReceived.add(jsonResponse.getString("data"));
//Log.i("sendOnSystemRequestToUrl", "jsonResponse.getString(data): " + jsonResponse.getString("data"));
}
else
{
DebugTool.logError("sendOnSystemRequestToUrl: Data in JSON Object neither an array nor a string.");
//Log.i("sendOnSystemRequestToUrl", "sendOnSystemRequestToUrl: Data in JSON Object neither an array nor a string.");
return;
}
String sResponse = cloudDataReceived.toString();
if (sResponse.length() > 512)
{
sResponse = sResponse.substring(0, 511);
}
updateBroadcastIntent(sendIntent, "DATA", "Data from cloud response: " + sResponse);
// Send new SystemRequest to SDL
SystemRequest mySystemRequest;
if (bLegacy)
mySystemRequest = RPCRequestFactory.buildSystemRequestLegacy(cloudDataReceived, getPoliciesReservedCorrelationID());
else
mySystemRequest = RPCRequestFactory.buildSystemRequest(response.toString(), getPoliciesReservedCorrelationID());
if (getIsConnected())
{
sendRPCRequestPrivate(mySystemRequest);
Log.i("sendOnSystemRequestToUrl", "sent to sdl");
updateBroadcastIntent(sendIntent2, "RPC_NAME", FunctionID.SYSTEM_REQUEST);
updateBroadcastIntent(sendIntent2, "TYPE", RPCMessage.KEY_REQUEST);
updateBroadcastIntent(sendIntent2, "CORRID", mySystemRequest.getCorrelationID());
}
}
catch (SdlException e)
{
DebugTool.logError("sendOnSystemRequestToUrl: Could not get data from JSONObject received.", e);
updateBroadcastIntent(sendIntent, "COMMENT3", " SdlException encountered sendOnSystemRequestToUrl: "+ e);
//Log.i("pt", "sendOnSystemRequestToUrl: Could not get data from JSONObject received."+ e);
}
catch (JSONException e)
{
DebugTool.logError("sendOnSystemRequestToUrl: JSONException: ", e);
updateBroadcastIntent(sendIntent, "COMMENT3", " JSONException encountered sendOnSystemRequestToUrl: "+ e);
//Log.i("pt", "sendOnSystemRequestToUrl: JSONException: "+ e);
}
catch (UnsupportedEncodingException e)
{
DebugTool.logError("sendOnSystemRequestToUrl: Could not encode string.", e);
updateBroadcastIntent(sendIntent, "COMMENT3", " UnsupportedEncodingException encountered sendOnSystemRequestToUrl: "+ e);
//Log.i("pt", "sendOnSystemRequestToUrl: Could not encode string."+ e);
}
catch (ProtocolException e)
{
DebugTool.logError("sendOnSystemRequestToUrl: Could not set request method to post.", e);
updateBroadcastIntent(sendIntent, "COMMENT3", " ProtocolException encountered sendOnSystemRequestToUrl: "+ e);
//Log.i("pt", "sendOnSystemRequestToUrl: Could not set request method to post."+ e);
}
catch (MalformedURLException e)
{
DebugTool.logError("sendOnSystemRequestToUrl: URL Exception when sending SystemRequest to an external server.", e);
updateBroadcastIntent(sendIntent, "COMMENT3", " MalformedURLException encountered sendOnSystemRequestToUrl: "+ e);
//Log.i("pt", "sendOnSystemRequestToUrl: URL Exception when sending SystemRequest to an external server."+ e);
}
catch (IOException e)
{
DebugTool.logError("sendOnSystemRequestToUrl: IOException: ", e);
updateBroadcastIntent(sendIntent, "COMMENT3", " IOException while sending to cloud: IOException: "+ e);
//Log.i("pt", "sendOnSystemRequestToUrl: IOException: "+ e);
}
catch (Exception e)
{
DebugTool.logError("sendOnSystemRequestToUrl: Unexpected Exception: ", e);
updateBroadcastIntent(sendIntent, "COMMENT3", " Exception encountered sendOnSystemRequestToUrl: "+ e);
//Log.i("pt", "sendOnSystemRequestToUrl: Unexpected Exception: " + e);
}
finally
{
sendBroadcastIntent(sendIntent);
sendBroadcastIntent(sendIntent2);
if (iFileCount < 10)
iFileCount++;
else
iFileCount = 0;
if(urlConnection != null)
{
urlConnection.disconnect();
}
}
}
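// For reference, the cloud exchange performed above uses a simple JSON envelope. In the legacy
// path the request body wraps the OnSystemRequest legacy data, and in both paths the response is
// expected to carry a "data" member that is either an array of strings or a single string (the
// values below are placeholders):
//
//   request:  { "data": [ "<policy packet>", "..." ] }
//   response: { "data": [ "<policy packet>", "..." ] }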
private int getPoliciesReservedCorrelationID() {
return POLICIES_CORRELATION_ID;
}
// Test correlationID
private boolean isCorrelationIDProtected(Integer correlationID) {
if (correlationID != null &&
(HEARTBEAT_CORRELATION_ID == correlationID
|| REGISTER_APP_INTERFACE_CORRELATION_ID == correlationID
|| UNREGISTER_APP_INTERFACE_CORRELATION_ID == correlationID
|| POLICIES_CORRELATION_ID == correlationID)) {
return true;
}
return false;
}
// Public isConnected method to allow the legacy proxy to poll the connection state
public Boolean getIsConnected() {
if (sdlSession == null) return false;
return sdlSession.getIsConnected();
}
/**
* Returns whether the application is registered with SDL. Note: it is possible
* (for example during testing) for the connection to be established while the
* application is not yet registered.
*
* @return true if the application is registered in SDL
*/
public Boolean getAppInterfaceRegistered() {
return _appInterfaceRegisterd;
}
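// Illustrative sketch: because the transport connection can exist before RegisterAppInterface
// completes, callers that gate work on proxy state would typically check both flags. The helper
// name below is hypothetical.
//
//   private boolean isReadyForRpcs(SdlProxyBase<?> proxy) {
//       return proxy.getIsConnected() && proxy.getAppInterfaceRegistered();
//   }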
// Function to initialize new proxy connection
private void initializeProxy() throws SdlException {
// Reset all of the flags and state variables
_haveReceivedFirstNonNoneHMILevel = false;
_haveReceivedFirstFocusLevel = false;
_haveReceivedFirstFocusLevelFull = false;
if (_preRegisterd)
_appInterfaceRegisterd = true;
else
_appInterfaceRegisterd = false;
_sdlIntefaceAvailablity = SdlInterfaceAvailability.SDL_INTERFACE_UNAVAILABLE;
// Setup SdlConnection
synchronized(CONNECTION_REFERENCE_LOCK) {
this.sdlSession = SdlSession.createSession(_wiproVersion,_interfaceBroker, _transportConfig);
}
synchronized(CONNECTION_REFERENCE_LOCK) {
this.sdlSession.startSession();
sendTransportBroadcast();
}
}
public void sendTransportBroadcast()
{
if (sdlSession == null || _transportConfig == null) return;
String sTransComment = sdlSession.getBroadcastComment(_transportConfig);
if (sTransComment == null || sTransComment.equals("")) return;
Intent sendIntent = createBroadcastIntent();
updateBroadcastIntent(sendIntent, "FUNCTION_NAME", "initializeProxy");
updateBroadcastIntent(sendIntent, "COMMENT1", sTransComment);
sendBroadcastIntent(sendIntent);
}
/**
* Public method to enable the Siphon Trace Server
*/
public void enableSiphonDebug() {
short enabledPortNumber = SiphonServer.enableSiphonServer();
String sSiphonPortNumber = "Enabled Siphon Port Number: " + enabledPortNumber;
Intent sendIntent = createBroadcastIntent();
updateBroadcastIntent(sendIntent, "FUNCTION_NAME", "enableSiphonDebug");
updateBroadcastIntent(sendIntent, "COMMENT1", sSiphonPortNumber);
sendBroadcastIntent(sendIntent);
}
/**
* Public method to disable the Siphon Trace Server
*/
public void disableSiphonDebug() {
short disabledPortNumber = SiphonServer.disableSiphonServer();
if (disabledPortNumber != -1) {
String sSiphonPortNumber = "Disabled Siphon Port Number: " + disabledPortNumber;
Intent sendIntent = createBroadcastIntent();
updateBroadcastIntent(sendIntent, "FUNCTION_NAME", "disableSiphonDebug");
updateBroadcastIntent(sendIntent, "COMMENT1", sSiphonPortNumber);
sendBroadcastIntent(sendIntent);
}
}
/**
* Public method to enable the Debug Tool
*/
public static void enableDebugTool() {
DebugTool.enableDebugTool();
}
/**
* Public method to disable the Debug Tool
*/
public static void disableDebugTool() {
DebugTool.disableDebugTool();
}
/**
* Public method to determine whether the Debug Tool is enabled
*/
public static boolean isDebugEnabled() {
return DebugTool.isDebugEnabled();
}
@Deprecated
public void close() throws SdlException {
dispose();
}
private void cleanProxy(SdlDisconnectedReason disconnectedReason) throws SdlException {
try {
// ALM Specific Cleanup
if (_advancedLifecycleManagementEnabled) {
_sdlConnectionState = SdlConnectionState.SDL_DISCONNECTED;
firstTimeFull = true;
// Should we wait for the interface to be unregistered?
Boolean waitForInterfaceUnregistered = false;
// Unregister app interface
synchronized(CONNECTION_REFERENCE_LOCK) {
if (sdlSession != null && sdlSession.getIsConnected() && getAppInterfaceRegistered()) {
waitForInterfaceUnregistered = true;
unregisterAppInterfacePrivate(UNREGISTER_APP_INTERFACE_CORRELATION_ID);
}
}
// Wait for the app interface to be unregistered
if (waitForInterfaceUnregistered) {
synchronized(APP_INTERFACE_REGISTERED_LOCK) {
try {
APP_INTERFACE_REGISTERED_LOCK.wait(3000);
} catch (InterruptedException e) {
// Do nothing
}
}
}
}
rpcUpdateListeners.clear();
// Clean up SDL Connection
synchronized(CONNECTION_REFERENCE_LOCK) {
if (sdlSession != null) sdlSession.close();
}
} catch (SdlException e) {
throw e;
} finally {
SdlTrace.logProxyEvent("SdlProxy cleaned.", SDL_LIB_TRACE_KEY);
}
}
/**
* Terminates the App's Interface Registration, closes the transport connection, ends the protocol session, and frees any resources used by the proxy.
*/
public void dispose() throws SdlException
{
if (_proxyDisposed) {
throw new SdlException("This object has been disposed, it is no long capable of executing methods.", SdlExceptionCause.SDL_PROXY_DISPOSED);
}
_proxyDisposed = true;
SdlTrace.logProxyEvent("Application called dispose() method.", SDL_LIB_TRACE_KEY);
try{
// Clean the proxy
cleanProxy(SdlDisconnectedReason.APPLICATION_REQUESTED_DISCONNECT);
// Close IncomingProxyMessageDispatcher thread
synchronized(INCOMING_MESSAGE_QUEUE_THREAD_LOCK) {
if (_incomingProxyMessageDispatcher != null) {
_incomingProxyMessageDispatcher.dispose();
_incomingProxyMessageDispatcher = null;
}
}
// Close OutgoingProxyMessageDispatcher thread
synchronized(OUTGOING_MESSAGE_QUEUE_THREAD_LOCK) {
if (_outgoingProxyMessageDispatcher != null) {
_outgoingProxyMessageDispatcher.dispose();
_outgoingProxyMessageDispatcher = null;
}
}
// Close InternalProxyMessageDispatcher thread
synchronized(INTERNAL_MESSAGE_QUEUE_THREAD_LOCK) {
if (_internalProxyMessageDispatcher != null) {
_internalProxyMessageDispatcher.dispose();
_internalProxyMessageDispatcher = null;
}
}
_traceDeviceInterrogator = null;
rpcUpdateListeners = null;
} catch (SdlException e) {
throw e;
} finally {
SdlTrace.logProxyEvent("SdlProxy disposed.", SDL_LIB_TRACE_KEY);
}
} // end-method
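// Illustrative sketch: dispose() is terminal, so applications typically call it exactly once when
// the proxy is no longer needed (for example from a Service's onDestroy()) and then drop the
// reference. The field name myProxy is hypothetical.
//
//   @Override
//   public void onDestroy() {
//       if (myProxy != null) {
//           try {
//               myProxy.dispose();
//           } catch (SdlException e) {
//               DebugTool.logError("Failed to dispose proxy", e);
//           }
//           myProxy = null;
//       }
//       super.onDestroy();
//   }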
private static Object CYCLE_LOCK = new Object();
private boolean _cycling = false;
// Method to cycle the proxy, only called in ALM
protected void cycleProxy(SdlDisconnectedReason disconnectedReason) {
if (_cycling) return;
synchronized(CYCLE_LOCK)
{
try{
_cycling = true;
cleanProxy(disconnectedReason);
initializeProxy();
notifyProxyClosed("Sdl Proxy Cycled", new SdlException("Sdl Proxy Cycled", SdlExceptionCause.SDL_PROXY_CYCLED), disconnectedReason);
}
catch (SdlException e) {
Intent sendIntent = createBroadcastIntent();
updateBroadcastIntent(sendIntent, "FUNCTION_NAME", "cycleProxy");
updateBroadcastIntent(sendIntent, "COMMENT1", "Proxy cycled, exception cause: " + e.getSdlExceptionCause());
sendBroadcastIntent(sendIntent);
switch(e.getSdlExceptionCause()) {
case BLUETOOTH_DISABLED:
notifyProxyClosed("Bluetooth is disabled. Bluetooth must be enabled to connect to SDL. Reattempt a connection once Bluetooth is enabled.",
new SdlException("Bluetooth is disabled. Bluetooth must be enabled to connect to SDL. Reattempt a connection once Bluetooth is enabled.", SdlExceptionCause.BLUETOOTH_DISABLED), SdlDisconnectedReason.BLUETOOTH_DISABLED);
break;
case BLUETOOTH_ADAPTER_NULL:
notifyProxyClosed("Cannot locate a Bluetooth adapater. A SDL connection is impossible on this device until a Bluetooth adapter is added.",
new SdlException("Cannot locate a Bluetooth adapater. A SDL connection is impossible on this device until a Bluetooth adapter is added.", SdlExceptionCause.BLUETOOTH_ADAPTER_NULL), SdlDisconnectedReason.BLUETOOTH_ADAPTER_ERROR);
break;
default :
notifyProxyClosed("Cycling the proxy failed.", e, SdlDisconnectedReason.GENERIC_ERROR);
break;
}
} catch (Exception e) {
notifyProxyClosed("Cycling the proxy failed.", e, SdlDisconnectedReason.GENERIC_ERROR);
}
_cycling = false;
}
}
private void dispatchIncomingMessage(ProtocolMessage message) {
try{
// Dispatching logic
if (message.getSessionType().equals(SessionType.RPC)) {
try {
if (_wiproVersion == 1) {
if (message.getVersion() > 1) setWiProVersion(message.getVersion());
}
Hashtable<String, Object> hash = new Hashtable<String, Object>();
if (_wiproVersion > 1) {
Hashtable<String, Object> hashTemp = new Hashtable<String, Object>();
hashTemp.put(RPCMessage.KEY_CORRELATION_ID, message.getCorrID());
if (message.getJsonSize() > 0) {
final Hashtable<String, Object> mhash = JsonRPCMarshaller.unmarshall(message.getData());
//hashTemp.put(Names.parameters, mhash.get(Names.parameters));
hashTemp.put(RPCMessage.KEY_PARAMETERS, mhash);
}
String functionName = FunctionID.getFunctionName(message.getFunctionID());
if (functionName != null) {
hashTemp.put(RPCMessage.KEY_FUNCTION_NAME, functionName);
} else {
DebugTool.logWarning("Dispatch Incoming Message - function name is null unknown RPC. FunctionID: " + message.getFunctionID());
return;
}
if (message.getRPCType() == 0x00) {
hash.put(RPCMessage.KEY_REQUEST, hashTemp);
} else if (message.getRPCType() == 0x01) {
hash.put(RPCMessage.KEY_RESPONSE, hashTemp);
} else if (message.getRPCType() == 0x02) {
hash.put(RPCMessage.KEY_NOTIFICATION, hashTemp);
}
if (message.getBulkData() != null) hash.put(RPCStruct.KEY_BULK_DATA, message.getBulkData());
} else {
final Hashtable<String, Object> mhash = JsonRPCMarshaller.unmarshall(message.getData());
hash = mhash;
}
handleRPCMessage(hash);
} catch (final Exception excp) {
DebugTool.logError("Failure handling protocol message: " + excp.toString(), excp);
passErrorToProxyListener("Error handing incoming protocol message.", excp);
} // end-catch
} else {
// Handle other protocol message types here
}
} catch (final Exception e) {
// Pass error to application through listener
DebugTool.logError("Error handing proxy event.", e);
passErrorToProxyListener("Error handing incoming protocol message.", e);
}
}
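// For reference, the Hashtable handed to handleRPCMessage() for a protocol version 2+ message has
// the following shape (keys shown by their RPCMessage/RPCStruct constant names; "SomeFunction" is
// a placeholder). For version 1, the unmarshalled JSON is passed through unchanged.
//
//   { KEY_REQUEST | KEY_RESPONSE | KEY_NOTIFICATION : {
//         KEY_FUNCTION_NAME  : "SomeFunction",
//         KEY_CORRELATION_ID : <int>,
//         KEY_PARAMETERS     : { ...unmarshalled JSON payload... }
//     },
//     KEY_BULK_DATA : <byte[]>   // only present when bulk data accompanied the message
//   }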
private byte getWiProVersion() {
return this._wiproVersion;
}
private void setWiProVersion(byte version) {
this._wiproVersion = version;
}
public String serializeJSON(RPCMessage msg)
{
String sReturn = null;
try
{
sReturn = msg.serializeJSON(getWiProVersion()).toString(2);
}
catch (final Exception e)
{
DebugTool.logError("Error handing proxy event.", e);
passErrorToProxyListener("Error serializing message.", e);
return null;
}
return sReturn;
}
private void handleErrorsFromIncomingMessageDispatcher(String info, Exception e) {
passErrorToProxyListener(info, e);
}
private void dispatchOutgoingMessage(ProtocolMessage message) {
synchronized(CONNECTION_REFERENCE_LOCK) {
if (sdlSession != null) {
sdlSession.sendMessage(message);
}
}
SdlTrace.logProxyEvent("SdlProxy sending Protocol Message: " + message.toString(), SDL_LIB_TRACE_KEY);
}
private void handleErrorsFromOutgoingMessageDispatcher(String info, Exception e) {
passErrorToProxyListener(info, e);
}
void dispatchInternalMessage(final InternalProxyMessage message) {
try{
if (message.getFunctionName().equals(InternalProxyMessage.OnProxyError)) {
final OnError msg = (OnError)message;
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onError(msg.getInfo(), msg.getException());
}
});
} else {
_proxyListener.onError(msg.getInfo(), msg.getException());
}
} else if (message.getFunctionName().equals(InternalProxyMessage.OnProxyOpened)) {
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
((IProxyListener)_proxyListener).onProxyOpened();
}
});
} else {
((IProxyListener)_proxyListener).onProxyOpened();
}
} else if (message.getFunctionName().equals(InternalProxyMessage.OnProxyClosed)) {
final OnProxyClosed msg = (OnProxyClosed)message;
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onProxyClosed(msg.getInfo(), msg.getException(), msg.getReason());
}
});
} else {
_proxyListener.onProxyClosed(msg.getInfo(), msg.getException(), msg.getReason());
}
} else {
// Diagnostics
SdlTrace.logProxyEvent("Unknown RPC Message encountered. Check for an updated version of the SDL Proxy.", SDL_LIB_TRACE_KEY);
DebugTool.logError("Unknown RPC Message encountered. Check for an updated version of the SDL Proxy.");
}
SdlTrace.logProxyEvent("Proxy fired callback: " + message.getFunctionName(), SDL_LIB_TRACE_KEY);
} catch(final Exception e) {
// Pass error to application through listener
DebugTool.logError("Error handing proxy event.", e);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onError("Error handing proxy event.", e);
}
});
} else {
_proxyListener.onError("Error handing proxy event.", e);
}
}
}
private void handleErrorsFromInternalMessageDispatcher(String info, Exception e) {
DebugTool.logError(info, e);
// This error cannot be passed to the user, as it indicates an error
// in the communication between the proxy and the application.
DebugTool.logError("InternalMessageDispatcher failed.", e);
// Note: this is the only place where the _proxyListener should be referenced asynchronously;
// with an error on the internalMessageDispatcher, we have no other reliable way of
// communicating with the application.
notifyProxyClosed("Proxy callback dispatcher is down. Proxy instance is invalid.", e, SdlDisconnectedReason.GENERIC_ERROR);
_proxyListener.onError("Proxy callback dispatcher is down. Proxy instance is invalid.", e);
}
// Private sendRPCRequestPrivate method. All RPCRequests are funneled through this method after
// error checking.
private void sendRPCRequestPrivate(RPCRequest request) throws SdlException {
try {
SdlTrace.logRPCEvent(InterfaceActivityDirection.Transmit, request, SDL_LIB_TRACE_KEY);
byte[] msgBytes = JsonRPCMarshaller.marshall(request, _wiproVersion);
ProtocolMessage pm = new ProtocolMessage();
pm.setData(msgBytes);
if (sdlSession != null)
pm.setSessionID(sdlSession.getSessionId());
pm.setMessageType(MessageType.RPC);
pm.setSessionType(SessionType.RPC);
pm.setFunctionID(FunctionID.getFunctionID(request.getFunctionName()));
if (request.getCorrelationID() == null)
{
//Log error here
throw new SdlException("CorrelationID cannot be null. RPC: " + request.getFunctionName(), SdlExceptionCause.INVALID_ARGUMENT);
}
pm.setCorrID(request.getCorrelationID());
if (request.getBulkData() != null)
pm.setBulkData(request.getBulkData());
// Queue this outgoing message
synchronized(OUTGOING_MESSAGE_QUEUE_THREAD_LOCK) {
if (_outgoingProxyMessageDispatcher != null) {
_outgoingProxyMessageDispatcher.queueMessage(pm);
//Since the message is queued we can add its listener to our list
OnUpdateListener listener = request.getOnUpdateListener();
if(request.getMessageType() == RPCMessage.KEY_REQUEST){//We might want to include other message types in the future
addOnUpdateListener(listener, request.getCorrelationID(), msgBytes.length);
}
}
}
} catch (OutOfMemoryError e) {
SdlTrace.logProxyEvent("OutOfMemory exception while sending request " + request.getFunctionName(), SDL_LIB_TRACE_KEY);
throw new SdlException("OutOfMemory exception while sending request " + request.getFunctionName(), e, SdlExceptionCause.INVALID_ARGUMENT);
}
}
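// Illustrative sketch: every RPCRequest funneled through sendRPCRequestPrivate() must carry a
// non-null correlation ID, which is how the matching response (and any OnUpdateListener) is
// located later. A caller-side pattern, assuming RPCRequest exposes a setCorrelationID() setter
// to match the getter used above and that the public sendRPCRequest() entry point (defined
// elsewhere in this class) performs the error checking mentioned in the comment above;
// nextCorrelationId() is hypothetical:
//
//   Show show = new Show();
//   show.setMainField1("Hello SDL");
//   show.setCorrelationID(nextCorrelationId());
//   sendRPCRequest(show);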
private void onPacketStart(int correlationId, long totalSize){ //May not need this
synchronized(ON_UPDATE_LISTENER_LOCK){
if(rpcUpdateListeners !=null
&& rpcUpdateListeners.indexOfKey(correlationId)>=0){
rpcUpdateListeners.get(correlationId).onStart(correlationId, totalSize);
}
}
}
public void onPacketProgress(int correlationId, long bytesWritten, long totalSize){
synchronized(ON_UPDATE_LISTENER_LOCK){
if(rpcUpdateListeners !=null
&& rpcUpdateListeners.indexOfKey(correlationId)>=0){
rpcUpdateListeners.get(correlationId).onUpdate(correlationId, bytesWritten, totalSize);
}
}
}
/**
* Provides a callback to the listener, either onFinish or onError, depending on the RPCResponse's result code.
* <p>Automatically removes the listener from the list of listeners on completion.
* @param msg the RPCResponse being completed
* @return true if a listener was called, false otherwise
*/
private boolean onPacketFinish(RPCResponse msg){
synchronized(ON_UPDATE_LISTENER_LOCK){
int correlationId = msg.getCorrelationID();
if(rpcUpdateListeners !=null
&& rpcUpdateListeners.indexOfKey(correlationId)>=0){
OnUpdateListener listener = rpcUpdateListeners.get(correlationId);
if(msg.getSuccess()){
listener.onFinish(correlationId, msg, listener.totalSize);
}else{
listener.onError(correlationId, msg.getResultCode(), msg.getInfo());
}
rpcUpdateListeners.remove(correlationId);
return true;
}
return false;
}
}
public void addOnUpdateListener(OnUpdateListener listener,int correlationId, int totalSize){
synchronized(ON_UPDATE_LISTENER_LOCK){
if(rpcUpdateListeners!=null
&& listener !=null){
listener.setTotalSize(totalSize);
listener.onStart(correlationId, totalSize);
rpcUpdateListeners.put(correlationId, listener);
}
}
}
public SparseArray<OnUpdateListener> getProgressListeners(){
synchronized(ON_UPDATE_LISTENER_LOCK){
return this.rpcUpdateListeners;
}
}
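// Illustrative sketch of the listener life cycle driven by the methods above (the exact
// OnUpdateListener signatures are assumed from the calls made in this file): a listener attached
// via addOnUpdateListener() receives onStart() when the message is queued, onUpdate() as bytes
// are written, and then exactly one of onFinish() or onError() from onPacketFinish(), after which
// it is removed from rpcUpdateListeners. The listener, request, and size names are hypothetical.
//
//   addOnUpdateListener(myPutFileListener, putFile.getCorrelationID(), payloadSize);
//   // for that correlation ID the proxy will then invoke, in order:
//   //   onStart(correlationId, totalSize)
//   //   onUpdate(correlationId, bytesWritten, totalSize)        (zero or more times)
//   //   onFinish(correlationId, response, totalSize)  -- or --  onError(correlationId, resultCode, info)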
private void handleRPCMessage(Hashtable<String, Object> hash) {
RPCMessage rpcMsg = new RPCMessage(hash);
String functionName = rpcMsg.getFunctionName();
String messageType = rpcMsg.getMessageType();
if (messageType.equals(RPCMessage.KEY_RESPONSE)) {
SdlTrace.logRPCEvent(InterfaceActivityDirection.Receive, new RPCResponse(rpcMsg), SDL_LIB_TRACE_KEY);
// Check to ensure response is not from an internal message (reserved correlation ID)
if (isCorrelationIDProtected((new RPCResponse(hash)).getCorrelationID())) {
// This is a response generated from an internal message, it can be trapped here
// The app should not receive a response for a request it did not send
if ((new RPCResponse(hash)).getCorrelationID() == REGISTER_APP_INTERFACE_CORRELATION_ID
&& _advancedLifecycleManagementEnabled
&& functionName.equals(FunctionID.REGISTER_APP_INTERFACE)) {
final RegisterAppInterfaceResponse msg = new RegisterAppInterfaceResponse(hash);
if (msg.getSuccess()) {
_appInterfaceRegisterd = true;
}
Intent sendIntent = createBroadcastIntent();
updateBroadcastIntent(sendIntent, "RPC_NAME", FunctionID.REGISTER_APP_INTERFACE);
updateBroadcastIntent(sendIntent, "TYPE", RPCMessage.KEY_RESPONSE);
updateBroadcastIntent(sendIntent, "SUCCESS", msg.getSuccess());
updateBroadcastIntent(sendIntent, "COMMENT1", msg.getInfo());
updateBroadcastIntent(sendIntent, "COMMENT2", msg.getResultCode().toString());
updateBroadcastIntent(sendIntent, "DATA",serializeJSON(msg));
updateBroadcastIntent(sendIntent, "CORRID", msg.getCorrelationID());
sendBroadcastIntent(sendIntent);
//_autoActivateIdReturned = msg.getAutoActivateID();
/*Place holder for legacy support*/ _autoActivateIdReturned = "8675309";
_buttonCapabilities = msg.getButtonCapabilities();
_displayCapabilities = msg.getDisplayCapabilities();
_softButtonCapabilities = msg.getSoftButtonCapabilities();
_presetBankCapabilities = msg.getPresetBankCapabilities();
_hmiZoneCapabilities = msg.getHmiZoneCapabilities();
_speechCapabilities = msg.getSpeechCapabilities();
_prerecordedSpeech = msg.getPrerecordedSpeech();
_sdlLanguage = msg.getLanguage();
_hmiDisplayLanguage = msg.getHmiDisplayLanguage();
_sdlMsgVersion = msg.getSdlMsgVersion();
_vrCapabilities = msg.getVrCapabilities();
_vehicleType = msg.getVehicleType();
_audioPassThruCapabilities = msg.getAudioPassThruCapabilities();
_proxyVersionInfo = msg.getProxyVersionInfo();
if (_bAppResumeEnabled)
{
if ( (msg.getResultCode() == Result.RESUME_FAILED) || (msg.getResultCode() != Result.SUCCESS) )
{
_bResumeSuccess = false;
_lastHashID = null;
}
else if ( (_sdlMsgVersion.getMajorVersion() > 2) && (_lastHashID != null) && (msg.getResultCode() == Result.SUCCESS) )
_bResumeSuccess = true;
}
_diagModes = msg.getSupportedDiagModes();
String sVersionInfo = "SDL Proxy Version: " + _proxyVersionInfo;
if (!isDebugEnabled())
{
enableDebugTool();
DebugTool.logInfo(sVersionInfo, false);
disableDebugTool();
}
else
DebugTool.logInfo(sVersionInfo, false);
sendIntent = createBroadcastIntent();
updateBroadcastIntent(sendIntent, "FUNCTION_NAME", "RAI_RESPONSE");
updateBroadcastIntent(sendIntent, "COMMENT1", sVersionInfo);
sendBroadcastIntent(sendIntent);
// Send onSdlConnected message in ALM
_sdlConnectionState = SdlConnectionState.SDL_CONNECTED;
// If registerAppInterface failed, exit with OnProxyUnusable
if (!msg.getSuccess()) {
notifyProxyClosed("Unable to register app interface. Review values passed to the SdlProxy constructor. RegisterAppInterface result code: ",
new SdlException("Unable to register app interface. Review values passed to the SdlProxy constructor. RegisterAppInterface result code: " + msg.getResultCode(), SdlExceptionCause.SDL_REGISTRATION_ERROR), SdlDisconnectedReason.SDL_REGISTRATION_ERROR);
}
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
if (_proxyListener instanceof IProxyListener) {
((IProxyListener)_proxyListener).onRegisterAppInterfaceResponse(msg);
} else if (_proxyListener instanceof IProxyListenerALM) {
//((IProxyListenerALM)_proxyListener).onRegisterAppInterfaceResponse(msg);
}
onPacketFinish(msg);
}
});
} else {
if (_proxyListener instanceof IProxyListener) {
((IProxyListener)_proxyListener).onRegisterAppInterfaceResponse(msg);
} else if (_proxyListener instanceof IProxyListenerALM) {
//((IProxyListenerALM)_proxyListener).onRegisterAppInterfaceResponse(msg);
}
onPacketFinish(msg);
}
} else if ((new RPCResponse(hash)).getCorrelationID() == POLICIES_CORRELATION_ID
&& functionName.equals(FunctionID.ON_ENCODED_SYNC_P_DATA)) {
Log.i("pt", "POLICIES_CORRELATION_ID SystemRequest Notification (Legacy)");
final OnSystemRequest msg = new OnSystemRequest(hash);
// If url is not null, then send to URL
if ( (msg.getUrl() != null) )
{
// URL has data, attempt to post request to external server
Thread handleOffboardTransmissionThread = new Thread() {
@Override
public void run() {
sendOnSystemRequestToUrl(msg);
}
};
handleOffboardTransmissionThread.start();
}
}
else if ((new RPCResponse(hash)).getCorrelationID() == POLICIES_CORRELATION_ID
&& functionName.equals(FunctionID.ENCODED_SYNC_P_DATA)) {
Log.i("pt", "POLICIES_CORRELATION_ID SystemRequest Response (Legacy)");
final SystemRequestResponse msg = new SystemRequestResponse(hash);
Intent sendIntent = createBroadcastIntent();
updateBroadcastIntent(sendIntent, "RPC_NAME", FunctionID.SYSTEM_REQUEST);
updateBroadcastIntent(sendIntent, "TYPE", RPCMessage.KEY_RESPONSE);
updateBroadcastIntent(sendIntent, "SUCCESS", msg.getSuccess());
updateBroadcastIntent(sendIntent, "COMMENT1", msg.getInfo());
updateBroadcastIntent(sendIntent, "COMMENT2", msg.getResultCode().toString());
updateBroadcastIntent(sendIntent, "CORRID", msg.getCorrelationID());
sendBroadcastIntent(sendIntent);
}
else if ((new RPCResponse(hash)).getCorrelationID() == POLICIES_CORRELATION_ID
&& functionName.equals(FunctionID.SYSTEM_REQUEST)) {
final SystemRequestResponse msg = new SystemRequestResponse(hash);
Intent sendIntent = createBroadcastIntent();
updateBroadcastIntent(sendIntent, "RPC_NAME", FunctionID.SYSTEM_REQUEST);
updateBroadcastIntent(sendIntent, "TYPE", RPCMessage.KEY_RESPONSE);
updateBroadcastIntent(sendIntent, "SUCCESS", msg.getSuccess());
updateBroadcastIntent(sendIntent, "COMMENT1", msg.getInfo());
updateBroadcastIntent(sendIntent, "COMMENT2", msg.getResultCode().toString());
updateBroadcastIntent(sendIntent, "CORRID", msg.getCorrelationID());
updateBroadcastIntent(sendIntent, "DATA", serializeJSON(msg));
sendBroadcastIntent(sendIntent);
}
else if (functionName.equals(FunctionID.UNREGISTER_APP_INTERFACE)) {
// UnregisterAppInterface
_appInterfaceRegisterd = false;
synchronized(APP_INTERFACE_REGISTERED_LOCK) {
APP_INTERFACE_REGISTERED_LOCK.notify();
}
final UnregisterAppInterfaceResponse msg = new UnregisterAppInterfaceResponse(hash);
Intent sendIntent = createBroadcastIntent();
updateBroadcastIntent(sendIntent, "RPC_NAME", FunctionID.UNREGISTER_APP_INTERFACE);
updateBroadcastIntent(sendIntent, "TYPE", RPCMessage.KEY_RESPONSE);
updateBroadcastIntent(sendIntent, "SUCCESS", msg.getSuccess());
updateBroadcastIntent(sendIntent, "COMMENT1", msg.getInfo());
updateBroadcastIntent(sendIntent, "COMMENT2", msg.getResultCode().toString());
updateBroadcastIntent(sendIntent, "DATA",serializeJSON(msg));
updateBroadcastIntent(sendIntent, "CORRID", msg.getCorrelationID());
sendBroadcastIntent(sendIntent);
}
return;
}
if (functionName.equals(FunctionID.REGISTER_APP_INTERFACE)) {
final RegisterAppInterfaceResponse msg = new RegisterAppInterfaceResponse(hash);
if (msg.getSuccess()) {
_appInterfaceRegisterd = true;
}
//_autoActivateIdReturned = msg.getAutoActivateID();
/*Place holder for legacy support*/ _autoActivateIdReturned = "8675309";
_buttonCapabilities = msg.getButtonCapabilities();
_displayCapabilities = msg.getDisplayCapabilities();
_softButtonCapabilities = msg.getSoftButtonCapabilities();
_presetBankCapabilities = msg.getPresetBankCapabilities();
_hmiZoneCapabilities = msg.getHmiZoneCapabilities();
_speechCapabilities = msg.getSpeechCapabilities();
_prerecordedSpeech = msg.getPrerecordedSpeech();
_sdlLanguage = msg.getLanguage();
_hmiDisplayLanguage = msg.getHmiDisplayLanguage();
_sdlMsgVersion = msg.getSdlMsgVersion();
_vrCapabilities = msg.getVrCapabilities();
_vehicleType = msg.getVehicleType();
_audioPassThruCapabilities = msg.getAudioPassThruCapabilities();
_proxyVersionInfo = msg.getProxyVersionInfo();
if (_bAppResumeEnabled)
{
if ( (msg.getResultCode() == Result.RESUME_FAILED) || (msg.getResultCode() != Result.SUCCESS) )
{
_bResumeSuccess = false;
_lastHashID = null;
}
else if ( (_sdlMsgVersion.getMajorVersion() > 2) && (_lastHashID != null) && (msg.getResultCode() == Result.SUCCESS) )
_bResumeSuccess = true;
}
_diagModes = msg.getSupportedDiagModes();
if (!isDebugEnabled())
{
enableDebugTool();
DebugTool.logInfo("SDL Proxy Version: " + _proxyVersionInfo);
disableDebugTool();
}
else
DebugTool.logInfo("SDL Proxy Version: " + _proxyVersionInfo);
// RegisterAppInterface
if (_advancedLifecycleManagementEnabled) {
// Send onSdlConnected message in ALM
_sdlConnectionState = SdlConnectionState.SDL_CONNECTED;
// If registerAppInterface failed, exit with OnProxyUnusable
if (!msg.getSuccess()) {
notifyProxyClosed("Unable to register app interface. Review values passed to the SdlProxy constructor. RegisterAppInterface result code: ",
new SdlException("Unable to register app interface. Review values passed to the SdlProxy constructor. RegisterAppInterface result code: " + msg.getResultCode(), SdlExceptionCause.SDL_REGISTRATION_ERROR), SdlDisconnectedReason.SDL_REGISTRATION_ERROR);
}
} else {
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
if (_proxyListener instanceof IProxyListener) {
((IProxyListener)_proxyListener).onRegisterAppInterfaceResponse(msg);
} else if (_proxyListener instanceof IProxyListenerALM) {
//((IProxyListenerALM)_proxyListener).onRegisterAppInterfaceResponse(msg);
}
onPacketFinish(msg);
}
});
} else {
if (_proxyListener instanceof IProxyListener) {
((IProxyListener)_proxyListener).onRegisterAppInterfaceResponse(msg);
} else if (_proxyListener instanceof IProxyListenerALM) {
//((IProxyListenerALM)_proxyListener).onRegisterAppInterfaceResponse(msg);
}
onPacketFinish(msg);
}
}
} else if (functionName.equals(FunctionID.SPEAK)) {
// SpeakResponse
final SpeakResponse msg = new SpeakResponse(hash);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onSpeakResponse(msg);
onPacketFinish(msg);
}
});
} else {
_proxyListener.onSpeakResponse(msg);
onPacketFinish(msg);
}
} else if (functionName.equals(FunctionID.ALERT)) {
// AlertResponse
final AlertResponse msg = new AlertResponse(hash);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onAlertResponse(msg);
onPacketFinish(msg);
}
});
} else {
_proxyListener.onAlertResponse(msg);
onPacketFinish(msg);
}
} else if (functionName.equals(FunctionID.SHOW)) {
// ShowResponse
final ShowResponse msg = new ShowResponse(hash);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onShowResponse((ShowResponse)msg);
onPacketFinish(msg);
}
});
} else {
_proxyListener.onShowResponse((ShowResponse)msg);
onPacketFinish(msg);
}
} else if (functionName.equals(FunctionID.ADD_COMMAND)) {
// AddCommand
final AddCommandResponse msg = new AddCommandResponse(hash);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onAddCommandResponse((AddCommandResponse)msg);
onPacketFinish(msg);
}
});
} else {
_proxyListener.onAddCommandResponse((AddCommandResponse)msg);
onPacketFinish(msg);
}
} else if (functionName.equals(FunctionID.DELETE_COMMAND)) {
// DeleteCommandResponse
final DeleteCommandResponse msg = new DeleteCommandResponse(hash);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onDeleteCommandResponse((DeleteCommandResponse)msg);
onPacketFinish(msg);
}
});
} else {
_proxyListener.onDeleteCommandResponse((DeleteCommandResponse)msg);
onPacketFinish(msg);
}
} else if (functionName.equals(FunctionID.ADD_SUB_MENU)) {
// AddSubMenu
final AddSubMenuResponse msg = new AddSubMenuResponse(hash);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onAddSubMenuResponse((AddSubMenuResponse)msg);
onPacketFinish(msg);
}
});
} else {
_proxyListener.onAddSubMenuResponse((AddSubMenuResponse)msg);
onPacketFinish(msg);
}
} else if (functionName.equals(FunctionID.DELETE_SUB_MENU)) {
// DeleteSubMenu
final DeleteSubMenuResponse msg = new DeleteSubMenuResponse(hash);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onDeleteSubMenuResponse((DeleteSubMenuResponse)msg);
onPacketFinish(msg);
}
});
} else {
_proxyListener.onDeleteSubMenuResponse((DeleteSubMenuResponse)msg);
onPacketFinish(msg);
}
} else if (functionName.equals(FunctionID.SUBSCRIBE_BUTTON)) {
// SubscribeButton
final SubscribeButtonResponse msg = new SubscribeButtonResponse(hash);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onSubscribeButtonResponse((SubscribeButtonResponse)msg);
onPacketFinish(msg);
}
});
} else {
_proxyListener.onSubscribeButtonResponse((SubscribeButtonResponse)msg);
onPacketFinish(msg);
}
} else if (functionName.equals(FunctionID.UNSUBSCRIBE_BUTTON)) {
// UnsubscribeButton
final UnsubscribeButtonResponse msg = new UnsubscribeButtonResponse(hash);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onUnsubscribeButtonResponse((UnsubscribeButtonResponse)msg);
onPacketFinish(msg);
}
});
} else {
_proxyListener.onUnsubscribeButtonResponse((UnsubscribeButtonResponse)msg);
onPacketFinish(msg);
}
} else if (functionName.equals(FunctionID.SET_MEDIA_CLOCK_TIMER)) {
// SetMediaClockTimer
final SetMediaClockTimerResponse msg = new SetMediaClockTimerResponse(hash);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onSetMediaClockTimerResponse((SetMediaClockTimerResponse)msg);
onPacketFinish(msg);
}
});
} else {
_proxyListener.onSetMediaClockTimerResponse((SetMediaClockTimerResponse)msg);
onPacketFinish(msg);
}
} else if (functionName.equals(FunctionID.ENCODED_SYNC_P_DATA)) {
final SystemRequestResponse msg = new SystemRequestResponse(hash);
Intent sendIntent = createBroadcastIntent();
updateBroadcastIntent(sendIntent, "RPC_NAME", FunctionID.SYSTEM_REQUEST);
updateBroadcastIntent(sendIntent, "TYPE", RPCMessage.KEY_RESPONSE);
updateBroadcastIntent(sendIntent, "SUCCESS", msg.getSuccess());
updateBroadcastIntent(sendIntent, "COMMENT1", msg.getInfo());
updateBroadcastIntent(sendIntent, "COMMENT2", msg.getResultCode().toString());
updateBroadcastIntent(sendIntent, "CORRID", msg.getCorrelationID());
sendBroadcastIntent(sendIntent);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onSystemRequestResponse(msg);
onPacketFinish(msg);
}
});
} else {
_proxyListener.onSystemRequestResponse(msg);
onPacketFinish(msg);
}
} else if (functionName.equals(FunctionID.CREATE_INTERACTION_CHOICE_SET)) {
// CreateInteractionChoiceSet
final CreateInteractionChoiceSetResponse msg = new CreateInteractionChoiceSetResponse(hash);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onCreateInteractionChoiceSetResponse((CreateInteractionChoiceSetResponse)msg);
onPacketFinish(msg);
}
});
} else {
_proxyListener.onCreateInteractionChoiceSetResponse((CreateInteractionChoiceSetResponse)msg);
onPacketFinish(msg);
}
} else if (functionName.equals(FunctionID.DELETE_INTERACTION_CHOICE_SET)) {
// DeleteInteractionChoiceSet
final DeleteInteractionChoiceSetResponse msg = new DeleteInteractionChoiceSetResponse(hash);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onDeleteInteractionChoiceSetResponse((DeleteInteractionChoiceSetResponse)msg);
onPacketFinish(msg);
}
});
} else {
_proxyListener.onDeleteInteractionChoiceSetResponse((DeleteInteractionChoiceSetResponse)msg);
onPacketFinish(msg);
}
} else if (functionName.equals(FunctionID.PERFORM_INTERACTION)) {
// PerformInteraction
final PerformInteractionResponse msg = new PerformInteractionResponse(hash);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onPerformInteractionResponse((PerformInteractionResponse)msg);
onPacketFinish(msg);
}
});
} else {
_proxyListener.onPerformInteractionResponse((PerformInteractionResponse)msg);
onPacketFinish(msg);
}
} else if (functionName.equals(FunctionID.SET_GLOBAL_PROPERTIES)) {
// SetGlobalPropertiesResponse
final SetGlobalPropertiesResponse msg = new SetGlobalPropertiesResponse(hash);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onSetGlobalPropertiesResponse((SetGlobalPropertiesResponse)msg);
onPacketFinish(msg);
}
});
} else {
_proxyListener.onSetGlobalPropertiesResponse((SetGlobalPropertiesResponse)msg);
onPacketFinish(msg);
}
} else if (functionName.equals(FunctionID.RESET_GLOBAL_PROPERTIES)) {
// ResetGlobalProperties
final ResetGlobalPropertiesResponse msg = new ResetGlobalPropertiesResponse(hash);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onResetGlobalPropertiesResponse((ResetGlobalPropertiesResponse)msg);
onPacketFinish(msg);
}
});
} else {
_proxyListener.onResetGlobalPropertiesResponse((ResetGlobalPropertiesResponse)msg);
onPacketFinish(msg);
}
} else if (functionName.equals(FunctionID.UNREGISTER_APP_INTERFACE)) {
// UnregisterAppInterface
_appInterfaceRegisterd = false;
synchronized(APP_INTERFACE_REGISTERED_LOCK) {
APP_INTERFACE_REGISTERED_LOCK.notify();
}
final UnregisterAppInterfaceResponse msg = new UnregisterAppInterfaceResponse(hash);
Intent sendIntent = createBroadcastIntent();
updateBroadcastIntent(sendIntent, "RPC_NAME", FunctionID.UNREGISTER_APP_INTERFACE);
updateBroadcastIntent(sendIntent, "TYPE", RPCMessage.KEY_RESPONSE);
updateBroadcastIntent(sendIntent, "SUCCESS", msg.getSuccess());
updateBroadcastIntent(sendIntent, "COMMENT1", msg.getInfo());
updateBroadcastIntent(sendIntent, "COMMENT2", msg.getResultCode().toString());
updateBroadcastIntent(sendIntent, "DATA",serializeJSON(msg));
updateBroadcastIntent(sendIntent, "CORRID", msg.getCorrelationID());
sendBroadcastIntent(sendIntent);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
if (_proxyListener instanceof IProxyListener) {
((IProxyListener)_proxyListener).onUnregisterAppInterfaceResponse(msg);
} else if (_proxyListener instanceof IProxyListenerALM) {
//((IProxyListenerALM)_proxyListener).onUnregisterAppInterfaceResponse(msg);
}
onPacketFinish(msg);
}
});
} else {
if (_proxyListener instanceof IProxyListener) {
((IProxyListener)_proxyListener).onUnregisterAppInterfaceResponse(msg);
} else if (_proxyListener instanceof IProxyListenerALM) {
//((IProxyListenerALM)_proxyListener).onUnregisterAppInterfaceResponse(msg);
}
onPacketFinish(msg);
}
notifyProxyClosed("UnregisterAppInterfaceResponse", null, SdlDisconnectedReason.APP_INTERFACE_UNREG);
} else if (functionName.equals(FunctionID.GENERIC_RESPONSE)) {
// GenericResponse (usually an error)
final GenericResponse msg = new GenericResponse(hash);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onGenericResponse((GenericResponse)msg);
onPacketFinish(msg);
}
});
} else {
_proxyListener.onGenericResponse((GenericResponse)msg);
onPacketFinish(msg);
}
} else if (functionName.equals(FunctionID.SLIDER)) {
// Slider
final SliderResponse msg = new SliderResponse(hash);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onSliderResponse((SliderResponse)msg);
onPacketFinish(msg);
}
});
} else {
_proxyListener.onSliderResponse((SliderResponse)msg);
onPacketFinish(msg);
}
} else if (functionName.equals(FunctionID.PUT_FILE)) {
// PutFile
final PutFileResponse msg = new PutFileResponse(hash);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onPutFileResponse((PutFileResponse)msg);
onPacketFinish(msg);
}
});
} else {
_proxyListener.onPutFileResponse((PutFileResponse)msg);
onPacketFinish(msg);
}
} else if (functionName.equals(FunctionID.DELETE_FILE)) {
// DeleteFile
final DeleteFileResponse msg = new DeleteFileResponse(hash);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onDeleteFileResponse((DeleteFileResponse)msg);
onPacketFinish(msg);
}
});
} else {
_proxyListener.onDeleteFileResponse((DeleteFileResponse)msg);
onPacketFinish(msg);
}
} else if (functionName.equals(FunctionID.LIST_FILES)) {
// ListFiles
final ListFilesResponse msg = new ListFilesResponse(hash);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onListFilesResponse((ListFilesResponse)msg);
onPacketFinish(msg);
}
});
} else {
_proxyListener.onListFilesResponse((ListFilesResponse)msg);
onPacketFinish(msg);
}
} else if (functionName.equals(FunctionID.SET_APP_ICON)) {
// SetAppIcon
final SetAppIconResponse msg = new SetAppIconResponse(hash);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onSetAppIconResponse((SetAppIconResponse)msg);
onPacketFinish(msg);
}
});
} else {
_proxyListener.onSetAppIconResponse((SetAppIconResponse)msg);
onPacketFinish(msg);
}
} else if (functionName.equals(FunctionID.SCROLLABLE_MESSAGE)) {
// ScrollableMessage
final ScrollableMessageResponse msg = new ScrollableMessageResponse(hash);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onScrollableMessageResponse((ScrollableMessageResponse)msg);
onPacketFinish(msg);
}
});
} else {
_proxyListener.onScrollableMessageResponse((ScrollableMessageResponse)msg);
onPacketFinish(msg);
}
} else if (functionName.equals(FunctionID.CHANGE_REGISTRATION)) {
// ChangeLanguageRegistration
final ChangeRegistrationResponse msg = new ChangeRegistrationResponse(hash);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onChangeRegistrationResponse((ChangeRegistrationResponse)msg);
onPacketFinish(msg);
}
});
} else {
_proxyListener.onChangeRegistrationResponse((ChangeRegistrationResponse)msg);
onPacketFinish(msg);
}
} else if (functionName.equals(FunctionID.SET_DISPLAY_LAYOUT)) {
// SetDisplayLayout
final SetDisplayLayoutResponse msg = new SetDisplayLayoutResponse(hash);
// successfully changed display layout - update layout capabilities
if(msg.getSuccess()){
_displayCapabilities = msg.getDisplayCapabilities();
_buttonCapabilities = msg.getButtonCapabilities();
_presetBankCapabilities = msg.getPresetBankCapabilities();
_softButtonCapabilities = msg.getSoftButtonCapabilities();
}
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onSetDisplayLayoutResponse((SetDisplayLayoutResponse)msg);
onPacketFinish(msg);
}
});
} else {
_proxyListener.onSetDisplayLayoutResponse((SetDisplayLayoutResponse)msg);
onPacketFinish(msg);
}
} else if (functionName.equals(FunctionID.PERFORM_AUDIO_PASS_THRU)) {
// PerformAudioPassThru
final PerformAudioPassThruResponse msg = new PerformAudioPassThruResponse(hash);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onPerformAudioPassThruResponse((PerformAudioPassThruResponse)msg);
onPacketFinish(msg);
}
});
} else {
_proxyListener.onPerformAudioPassThruResponse((PerformAudioPassThruResponse)msg);
onPacketFinish(msg);
}
} else if (functionName.equals(FunctionID.END_AUDIO_PASS_THRU)) {
// EndAudioPassThru
final EndAudioPassThruResponse msg = new EndAudioPassThruResponse(hash);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onEndAudioPassThruResponse((EndAudioPassThruResponse)msg);
onPacketFinish(msg);
}
});
} else {
_proxyListener.onEndAudioPassThruResponse((EndAudioPassThruResponse)msg);
onPacketFinish(msg);
}
} else if (functionName.equals(FunctionID.SUBSCRIBE_VEHICLE_DATA)) {
// SubscribeVehicleData
final SubscribeVehicleDataResponse msg = new SubscribeVehicleDataResponse(hash);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onSubscribeVehicleDataResponse((SubscribeVehicleDataResponse)msg);
onPacketFinish(msg);
}
});
} else {
_proxyListener.onSubscribeVehicleDataResponse((SubscribeVehicleDataResponse)msg);
onPacketFinish(msg);
}
} else if (functionName.equals(FunctionID.UNSUBSCRIBE_VEHICLE_DATA)) {
// UnsubscribeVehicleData
final UnsubscribeVehicleDataResponse msg = new UnsubscribeVehicleDataResponse(hash);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onUnsubscribeVehicleDataResponse((UnsubscribeVehicleDataResponse)msg);
onPacketFinish(msg);
}
});
} else {
_proxyListener.onUnsubscribeVehicleDataResponse((UnsubscribeVehicleDataResponse)msg);
onPacketFinish(msg);
}
} else if (functionName.equals(FunctionID.GET_VEHICLE_DATA)) {
// GetVehicleData
final GetVehicleDataResponse msg = new GetVehicleDataResponse(hash);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onGetVehicleDataResponse((GetVehicleDataResponse)msg);
onPacketFinish(msg);
}
});
} else {
_proxyListener.onGetVehicleDataResponse((GetVehicleDataResponse)msg);
onPacketFinish(msg);
}
} else if (functionName.equals(FunctionID.READ_DID)) {
final ReadDIDResponse msg = new ReadDIDResponse(hash);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onReadDIDResponse((ReadDIDResponse)msg);
onPacketFinish(msg);
}
});
} else {
_proxyListener.onReadDIDResponse((ReadDIDResponse)msg);
onPacketFinish(msg);
}
} else if (functionName.equals(FunctionID.GET_DTCS)) {
final GetDTCsResponse msg = new GetDTCsResponse(hash);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onGetDTCsResponse((GetDTCsResponse)msg);
onPacketFinish(msg);
}
});
} else {
_proxyListener.onGetDTCsResponse((GetDTCsResponse)msg);
onPacketFinish(msg);
}
} else if (functionName.equals(FunctionID.DIAGNOSTIC_MESSAGE)) {
final DiagnosticMessageResponse msg = new DiagnosticMessageResponse(hash);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onDiagnosticMessageResponse((DiagnosticMessageResponse)msg);
onPacketFinish(msg);
}
});
} else {
_proxyListener.onDiagnosticMessageResponse((DiagnosticMessageResponse)msg);
onPacketFinish(msg);
}
}
else if (functionName.equals(FunctionID.SYSTEM_REQUEST)) {
final SystemRequestResponse msg = new SystemRequestResponse(hash);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onSystemRequestResponse((SystemRequestResponse)msg);
onPacketFinish(msg);
}
});
} else {
_proxyListener.onSystemRequestResponse((SystemRequestResponse)msg);
onPacketFinish(msg);
}
}
else {
if (_sdlMsgVersion != null) {
DebugTool.logError("Unrecognized response Message: " + functionName.toString() +
"SDL Message Version = " + _sdlMsgVersion);
} else {
DebugTool.logError("Unrecognized response Message: " + functionName.toString());
}
} // end-if
} else if (messageType.equals(RPCMessage.KEY_NOTIFICATION)) {
SdlTrace.logRPCEvent(InterfaceActivityDirection.Receive, new RPCNotification(rpcMsg), SDL_LIB_TRACE_KEY);
if (functionName.equals(FunctionID.ON_HMI_STATUS)) {
// OnHMIStatus
final OnHMIStatus msg = new OnHMIStatus(hash);
//setup lockscreeninfo
if (sdlSession != null)
{
sdlSession.getLockScreenMan().setHMILevel(msg.getHmiLevel());
}
msg.setFirstRun(Boolean.valueOf(firstTimeFull));
if (msg.getHmiLevel() == HMILevel.HMI_FULL) firstTimeFull = false;
if (msg.getHmiLevel() != _priorHmiLevel && msg.getAudioStreamingState() != _priorAudioStreamingState) {
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onOnHMIStatus((OnHMIStatus)msg);
_proxyListener.onOnLockScreenNotification(sdlSession.getLockScreenMan().getLockObj());
}
});
} else {
_proxyListener.onOnHMIStatus((OnHMIStatus)msg);
_proxyListener.onOnLockScreenNotification(sdlSession.getLockScreenMan().getLockObj());
}
}
} else if (functionName.equals(FunctionID.ON_COMMAND)) {
// OnCommand
final OnCommand msg = new OnCommand(hash);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onOnCommand((OnCommand)msg);
}
});
} else {
_proxyListener.onOnCommand((OnCommand)msg);
}
} else if (functionName.equals(FunctionID.ON_DRIVER_DISTRACTION)) {
// OnDriverDistraction
final OnDriverDistraction msg = new OnDriverDistraction(hash);
//setup lockscreeninfo
if (sdlSession != null)
{
boolean bVal = (msg.getState() == DriverDistractionState.DD_ON);
sdlSession.getLockScreenMan().setDriverDistStatus(bVal);
}
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onOnDriverDistraction(msg);
_proxyListener.onOnLockScreenNotification(sdlSession.getLockScreenMan().getLockObj());
}
});
} else {
_proxyListener.onOnDriverDistraction(msg);
_proxyListener.onOnLockScreenNotification(sdlSession.getLockScreenMan().getLockObj());
}
} else if (functionName.equals(FunctionID.ON_ENCODED_SYNC_P_DATA)) {
final OnSystemRequest msg = new OnSystemRequest(hash);
Intent sendIntent = createBroadcastIntent();
updateBroadcastIntent(sendIntent, "RPC_NAME", FunctionID.ON_SYSTEM_REQUEST);
updateBroadcastIntent(sendIntent, "TYPE", RPCMessage.KEY_NOTIFICATION);
// If url is null, then send notification to the app, otherwise, send to URL
if (msg.getUrl() == null) {
updateBroadcastIntent(sendIntent, "COMMENT1", "URL is a null value (received)");
sendBroadcastIntent(sendIntent);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onOnSystemRequest(msg);
}
});
} else {
_proxyListener.onOnSystemRequest(msg);
}
} else {
updateBroadcastIntent(sendIntent, "COMMENT1", "Sending to cloud: " + msg.getUrl());
sendBroadcastIntent(sendIntent);
Log.i("pt", "send to url");
// URL is known to be non-null in this branch
Thread handleOffboardTransmissionThread = new Thread() {
@Override
public void run() {
sendOnSystemRequestToUrl(msg);
}
};
handleOffboardTransmissionThread.start();
}
} else if (functionName.equals(FunctionID.ON_PERMISSIONS_CHANGE)) {
final OnPermissionsChange msg = new OnPermissionsChange(hash);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onOnPermissionsChange(msg);
}
});
} else {
_proxyListener.onOnPermissionsChange(msg);
}
} else if (functionName.equals(FunctionID.ON_TBT_CLIENT_STATE)) {
// OnTBTClientState
final OnTBTClientState msg = new OnTBTClientState(hash);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onOnTBTClientState(msg);
}
});
} else {
_proxyListener.onOnTBTClientState(msg);
}
} else if (functionName.equals(FunctionID.ON_BUTTON_PRESS)) {
// OnButtonPress
final OnButtonPress msg = new OnButtonPress(hash);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onOnButtonPress((OnButtonPress)msg);
}
});
} else {
_proxyListener.onOnButtonPress((OnButtonPress)msg);
}
} else if (functionName.equals(FunctionID.ON_BUTTON_EVENT)) {
// OnButtonEvent
final OnButtonEvent msg = new OnButtonEvent(hash);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onOnButtonEvent((OnButtonEvent)msg);
}
});
} else {
_proxyListener.onOnButtonEvent((OnButtonEvent)msg);
}
} else if (functionName.equals(FunctionID.ON_LANGUAGE_CHANGE)) {
// OnLanguageChange
final OnLanguageChange msg = new OnLanguageChange(hash);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onOnLanguageChange((OnLanguageChange)msg);
}
});
} else {
_proxyListener.onOnLanguageChange((OnLanguageChange)msg);
}
} else if (functionName.equals(FunctionID.ON_HASH_CHANGE)) {
// OnHashChange
final OnHashChange msg = new OnHashChange(hash);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onOnHashChange((OnHashChange)msg);
if (_bAppResumeEnabled)
{
_lastHashID = msg.getHashID();
}
}
});
} else {
_proxyListener.onOnHashChange((OnHashChange)msg);
if (_bAppResumeEnabled)
{
_lastHashID = msg.getHashID();
}
}
} else if (functionName.equals(FunctionID.ON_SYSTEM_REQUEST)) {
// OnSystemRequest
final OnSystemRequest msg = new OnSystemRequest(hash);
if ( (msg.getUrl() != null) &&
(msg.getRequestType() == RequestType.PROPRIETARY) &&
(msg.getFileType() == FileType.JSON) )
{
Thread handleOffboardTransmissionThread = new Thread() {
@Override
public void run() {
sendOnSystemRequestToUrl(msg);
}
};
handleOffboardTransmissionThread.start();
}
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onOnSystemRequest((OnSystemRequest)msg);
}
});
} else {
_proxyListener.onOnSystemRequest((OnSystemRequest)msg);
}
} else if (functionName.equals(FunctionID.ON_AUDIO_PASS_THRU)) {
// OnAudioPassThru
final OnAudioPassThru msg = new OnAudioPassThru(hash);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onOnAudioPassThru((OnAudioPassThru)msg);
}
});
} else {
_proxyListener.onOnAudioPassThru((OnAudioPassThru)msg);
}
} else if (functionName.equals(FunctionID.ON_VEHICLE_DATA)) {
// OnVehicleData
final OnVehicleData msg = new OnVehicleData(hash);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onOnVehicleData((OnVehicleData)msg);
}
});
} else {
_proxyListener.onOnVehicleData((OnVehicleData)msg);
}
}
else if (functionName.equals(FunctionID.ON_APP_INTERFACE_UNREGISTERED)) {
// OnAppInterfaceUnregistered
_appInterfaceRegisterd = false;
synchronized(APP_INTERFACE_REGISTERED_LOCK) {
APP_INTERFACE_REGISTERED_LOCK.notify();
}
final OnAppInterfaceUnregistered msg = new OnAppInterfaceUnregistered(hash);
Intent sendIntent = createBroadcastIntent();
updateBroadcastIntent(sendIntent, "RPC_NAME", FunctionID.ON_APP_INTERFACE_UNREGISTERED);
updateBroadcastIntent(sendIntent, "TYPE", RPCMessage.KEY_NOTIFICATION);
updateBroadcastIntent(sendIntent, "DATA",serializeJSON(msg));
sendBroadcastIntent(sendIntent);
if (_advancedLifecycleManagementEnabled) {
// This requires the proxy to be cycled
cycleProxy(SdlDisconnectedReason.convertAppInterfaceUnregisteredReason(msg.getReason()));
} else {
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
((IProxyListener)_proxyListener).onOnAppInterfaceUnregistered(msg);
}
});
} else {
((IProxyListener)_proxyListener).onOnAppInterfaceUnregistered(msg);
}
notifyProxyClosed("OnAppInterfaceUnregistered", null, SdlDisconnectedReason.APP_INTERFACE_UNREG);
}
}
else if (functionName.equals(FunctionID.ON_KEYBOARD_INPUT)) {
final OnKeyboardInput msg = new OnKeyboardInput(hash);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onOnKeyboardInput((OnKeyboardInput)msg);
}
});
} else {
_proxyListener.onOnKeyboardInput((OnKeyboardInput)msg);
}
}
else if (functionName.equals(FunctionID.ON_TOUCH_EVENT)) {
final OnTouchEvent msg = new OnTouchEvent(hash);
if (_callbackToUIThread) {
// Run in UI thread
_mainUIHandler.post(new Runnable() {
@Override
public void run() {
_proxyListener.onOnTouchEvent((OnTouchEvent)msg);
}
});
} else {
_proxyListener.onOnTouchEvent((OnTouchEvent)msg);
}
}
else {
if (_sdlMsgVersion != null) {
DebugTool.logInfo("Unrecognized notification Message: " + functionName.toString() +
" connected to SDL using message version: " + _sdlMsgVersion.getMajorVersion() + "." + _sdlMsgVersion.getMinorVersion());
} else {
DebugTool.logInfo("Unrecognized notification Message: " + functionName.toString());
}
} // end-if
} // end-if notification
SdlTrace.logProxyEvent("Proxy received RPC Message: " + functionName, SDL_LIB_TRACE_KEY);
}
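/*
 * Illustrative usage sketch (not part of this class): the dispatcher above delivers
 * notifications such as OnHMIStatus to the registered listener, on the UI thread when
 * _callbackToUIThread is enabled. Assuming the application implements IProxyListenerALM,
 * a handler might look like this:
 *
 *     @Override
 *     public void onOnHMIStatus(OnHMIStatus notification) {
 *         if (notification.getHmiLevel() == HMILevel.HMI_FULL
 *                 && Boolean.TRUE.equals(notification.getFirstRun())) {
 *             // First time in HMI_FULL - safe to build menus, subscribe buttons, send Show, etc.
 *         }
 *     }
 */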
/**
* Takes an RPCRequest and sends it to SDL. Responses are captured through callback on IProxyListener.
*
* @param request -The RPCRequest to send to SDL.
* @throws SdlException if the proxy has been disposed, the request is invalid, or there is no connection to SDL.
*/
public void sendRPCRequest(RPCRequest request) throws SdlException {
if (_proxyDisposed) {
throw new SdlException("This object has been disposed, it is no long capable of executing methods.", SdlExceptionCause.SDL_PROXY_DISPOSED);
}
// Test if request is null
if (request == null) {
SdlTrace.logProxyEvent("Application called sendRPCRequest method with a null RPCRequest.", SDL_LIB_TRACE_KEY);
throw new IllegalArgumentException("sendRPCRequest cannot be called with a null request.");
}
SdlTrace.logProxyEvent("Application called sendRPCRequest method for RPCRequest: ." + request.getFunctionName(), SDL_LIB_TRACE_KEY);
// Test if SdlConnection is null
synchronized(CONNECTION_REFERENCE_LOCK) {
if (sdlSession == null || !sdlSession.getIsConnected()) {
SdlTrace.logProxyEvent("Application attempted to send and RPCRequest without a connected transport.", SDL_LIB_TRACE_KEY);
throw new SdlException("There is no valid connection to SDL. sendRPCRequest cannot be called until SDL has been connected.", SdlExceptionCause.SDL_UNAVAILABLE);
}
}
if (isCorrelationIDProtected(request.getCorrelationID())) {
SdlTrace.logProxyEvent("Application attempted to use the reserved correlation ID, " + request.getCorrelationID(), SDL_LIB_TRACE_KEY);
throw new SdlException("Invalid correlation ID. The correlation ID, " + request.getCorrelationID()
+ " , is a reserved correlation ID.", SdlExceptionCause.RESERVED_CORRELATION_ID);
}
// Throw exception if RPCRequest is sent when SDL is unavailable
if (!_appInterfaceRegisterd && !request.getFunctionName().equals(FunctionID.REGISTER_APP_INTERFACE)) {
SdlTrace.logProxyEvent("Application attempted to send an RPCRequest (non-registerAppInterface), before the interface was registerd.", SDL_LIB_TRACE_KEY);
throw new SdlException("SDL is currently unavailable. RPC Requests cannot be sent.", SdlExceptionCause.SDL_UNAVAILABLE);
}
if (_advancedLifecycleManagementEnabled) {
if (request.getFunctionName().equals(FunctionID.REGISTER_APP_INTERFACE)
|| request.getFunctionName().equals(FunctionID.UNREGISTER_APP_INTERFACE)) {
SdlTrace.logProxyEvent("Application attempted to send a RegisterAppInterface or UnregisterAppInterface while using ALM.", SDL_LIB_TRACE_KEY);
throw new SdlException("The RPCRequest, " + request.getFunctionName() +
", is unallowed using the Advanced Lifecycle Management Model.", SdlExceptionCause.INCORRECT_LIFECYCLE_MODEL);
}
}
sendRPCRequestPrivate(request);
} // end-method
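/*
 * Usage sketch (illustrative only; "proxy" and the correlation ID value are assumptions):
 * an application typically builds an RPCRequest, assigns a correlation ID it can match
 * against the response callback, and hands it to sendRPCRequest().
 *
 *     Show show = new Show();
 *     show.setMainField1("Hello SDL");
 *     show.setCorrelationID(1001);
 *     try {
 *         proxy.sendRPCRequest(show);
 *     } catch (SdlException e) {
 *         DebugTool.logError("Failed to send Show request: " + e.getMessage());
 *     }
 */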
protected void notifyProxyClosed(final String info, final Exception e, final SdlDisconnectedReason reason) {
SdlTrace.logProxyEvent("NotifyProxyClose", SDL_LIB_TRACE_KEY);
OnProxyClosed message = new OnProxyClosed(info, e, reason);
queueInternalMessage(message);
}
private void passErrorToProxyListener(final String info, final Exception e) {
OnError message = new OnError(info, e);
queueInternalMessage(message);
}
private void startRPCProtocolSession(byte sessionID, String correlationID) {
// Set Proxy Lifecycle Available
if (_advancedLifecycleManagementEnabled) {
try {
registerAppInterfacePrivate(
_sdlMsgVersionRequest,
_applicationName,
_ttsName,
_ngnMediaScreenAppName,
_vrSynonyms,
_isMediaApp,
_sdlLanguageDesired,
_hmiDisplayLanguageDesired,
_appType,
_appID,
_autoActivateIdDesired,
REGISTER_APP_INTERFACE_CORRELATION_ID);
} catch (Exception e) {
notifyProxyClosed("Failed to register application interface with SDL. Check parameter values given to SdlProxy constructor.", e, SdlDisconnectedReason.SDL_REGISTRATION_ERROR);
}
} else {
InternalProxyMessage message = new InternalProxyMessage(InternalProxyMessage.OnProxyOpened);
queueInternalMessage(message);
}
}
// Queue internal callback message
private void queueInternalMessage(InternalProxyMessage message) {
synchronized(INTERNAL_MESSAGE_QUEUE_THREAD_LOCK) {
if (_internalProxyMessageDispatcher != null) {
_internalProxyMessageDispatcher.queueMessage(message);
}
}
}
// Queue incoming ProtocolMessage
private void queueIncomingMessage(ProtocolMessage message) {
synchronized(INCOMING_MESSAGE_QUEUE_THREAD_LOCK) {
if (_incomingProxyMessageDispatcher != null) {
_incomingProxyMessageDispatcher.queueMessage(message);
}
}
}
public boolean startRPCStream(InputStream is, RPCRequest msg) {
if (sdlSession == null) return false;
SdlConnection sdlConn = sdlSession.getSdlConnection();
if (sdlConn == null) return false;
sdlConn.startRPCStream(is, msg, SessionType.RPC, sdlSession.getSessionId(), _wiproVersion);
return true;
}
public OutputStream startRPCStream(RPCRequest msg) {
if (sdlSession == null) return null;
SdlConnection sdlConn = sdlSession.getSdlConnection();
if (sdlConn == null) return null;
return sdlConn.startRPCStream(msg, SessionType.RPC, sdlSession.getSessionId(), _wiproVersion);
}
public void endRPCStream() {
if (sdlSession == null) return;
SdlConnection sdlConn = sdlSession.getSdlConnection();
if (sdlConn == null) return;
sdlConn.stopStream();
}
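/*
 * Usage sketch (illustrative; the file path, file name and correlation ID are assumptions):
 * the RPC streaming helpers above are typically used to push a large payload, such as a
 * PutFile, without holding the whole file in memory.
 *
 *     PutFile putFile = new PutFile();
 *     putFile.setSdlFileName("app_icon.png");
 *     putFile.setFileType(FileType.GRAPHIC_PNG);
 *     putFile.setCorrelationID(2001);
 *     // FileNotFoundException handling omitted for brevity
 *     InputStream is = new FileInputStream("/sdcard/app_icon.png");
 *     if (proxy.startRPCStream(is, putFile)) {
 *         // Stream is running; call endRPCStream() when the transfer is complete.
 *     }
 */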
public boolean startH264(InputStream is) {
if (sdlSession == null) return false;
SdlConnection sdlConn = sdlSession.getSdlConnection();
if (sdlConn == null) return false;
navServiceResponseReceived = false;
navServiceResponse = false;
sdlConn.startService(SessionType.NAV, sdlSession.getSessionId());
int infiniteLoopKiller = 0;
// Busy-wait until the NAV service start response arrives (guard against spinning forever)
while (!navServiceResponseReceived && infiniteLoopKiller < Integer.MAX_VALUE) {
infiniteLoopKiller++;
}
if (navServiceResponse) {
sdlConn.startStream(is, SessionType.NAV, sdlSession.getSessionId());
return true;
} else {
return false;
}
}
public OutputStream startH264() {
if (sdlSession == null) return null;
SdlConnection sdlConn = sdlSession.getSdlConnection();
if (sdlConn == null) return null;
navServiceResponseReceived = false;
navServiceResponse = false;
sdlConn.startService(SessionType.NAV, sdlSession.getSessionId());
int infiniteLoopKiller = 0;
// Busy-wait until the NAV service start response arrives (guard against spinning forever)
while (!navServiceResponseReceived && infiniteLoopKiller < Integer.MAX_VALUE) {
infiniteLoopKiller++;
}
if (navServiceResponse) {
return sdlConn.startStream(SessionType.NAV, sdlSession.getSessionId());
} else {
return null;
}
}
public void endH264() {
if (sdlSession == null) return;
SdlConnection sdlConn = sdlSession.getSdlConnection();
if (sdlConn == null) return;
sdlConn.endService(SessionType.NAV, sdlSession.getSessionId());
}
public boolean startPCM(InputStream is) {
if (sdlSession == null) return false;
SdlConnection sdlConn = sdlSession.getSdlConnection();
if (sdlConn == null) return false;
navServiceResponseReceived = false;
navServiceResponse = false;
sdlConn.startService(SessionType.PCM, sdlSession.getSessionId());
int infiniteLoopKiller = 0;
// Busy-wait until the PCM service start response arrives (guard against spinning forever)
while (!navServiceResponseReceived && infiniteLoopKiller < Integer.MAX_VALUE) {
infiniteLoopKiller++;
}
if (navServiceResponse) {
sdlConn.startStream(is, SessionType.PCM, sdlSession.getSessionId());
return true;
} else {
return false;
}
}
public OutputStream startPCM() {
if (sdlSession == null) return null;
SdlConnection sdlConn = sdlSession.getSdlConnection();
if (sdlConn == null) return null;
navServiceResponseReceived = false;
navServiceResponse = false;
sdlConn.startService(SessionType.PCM, sdlSession.getSessionId());
int infiniteLoopKiller = 0;
// Busy-wait until the PCM service start response arrives (guard against spinning forever)
while (!navServiceResponseReceived && infiniteLoopKiller < Integer.MAX_VALUE) {
infiniteLoopKiller++;
}
if (navServiceResponse) {
return sdlConn.startStream(SessionType.PCM, sdlSession.getSessionId());
} else {
return null;
}
}
public void endPCM() {
if (sdlSession == null) return;
SdlConnection sdlConn = sdlSession.getSdlConnection();
if (sdlConn == null) return;
sdlConn.endService(SessionType.PCM, sdlSession.getSessionId());
}
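/*
 * Usage sketch (illustrative): the NAV (H264) and PCM helpers above share the same
 * start/stream/end lifecycle. Assuming "videoSource" is an InputStream producing an
 * H264 elementary stream:
 *
 *     if (proxy.startH264(videoSource)) {
 *         // Video service accepted; frames read from videoSource are being streamed.
 *         // ... later, when streaming should stop:
 *         proxy.endH264();
 *     }
 */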
private void NavServiceStarted() {
navServiceResponseReceived = true;
navServiceResponse = true;
}
private void NavServiceEnded() {
navServiceResponseReceived = true;
navServiceResponse = false;
}
@SuppressWarnings("unused")
private void AudioServiceStarted() {
pcmServiceResponseReceived = true;
pcmServiceResponse = true;
}
@SuppressWarnings("unused")
private void AudioServiceEnded() {
pcmServiceResponseReceived = true;
pcmServiceResponse = false;
}
public void setAppService(Service mService)
{
_appService = mService;
}
/*Begin V1 Enhanced helper*/
/**
*Sends an AddCommand RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
*@param commandID -Unique command ID of the command to add.
*@param menuText -Menu text for optional sub value containing menu parameters.
*@param parentID -Menu parent ID for optional sub value containing menu parameters.
*@param position -Menu position for optional sub value containing menu parameters.
*@param vrCommands -VR synonyms for this AddCommand.
*@param IconValue -A static hex icon value or the binary image file name identifier (sent by the PutFile RPC).
*@param IconType -Describes whether the image is static or dynamic
*@param correlationID -A unique ID that correlates each RPCRequest and RPCResponse
*@throws SdlException
*/
public void addCommand(Integer commandID,
String menuText, Integer parentID, Integer position,
Vector<String> vrCommands, String IconValue, ImageType IconType, Integer correlationID)
throws SdlException {
AddCommand msg = RPCRequestFactory.buildAddCommand(commandID, menuText, parentID, position,
vrCommands, IconValue, IconType, correlationID);
sendRPCRequest(msg);
}
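/*
 * Usage sketch (illustrative; the IDs, text and icon name are assumptions): adding a
 * voice-activated menu command with a dynamic icon previously uploaded via PutFile.
 *
 *     Vector<String> vrCommands = new Vector<String>();
 *     vrCommands.add("Show Weather");
 *     proxy.addCommand(100, "Weather", null, 1, vrCommands, "weather_icon.png", ImageType.DYNAMIC, 3001);
 */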
/**
*Sends an AddCommand RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
*@param commandID -Unique command ID of the command to add.
*@param menuText -Menu text for optional sub value containing menu parameters.
*@param position -Menu position for optional sub value containing menu parameters.
*@param vrCommands -VR synonyms for this AddCommand.
*@param IconValue -A static hex icon value or the binary image file name identifier (sent by the PutFile RPC).
*@param IconType -Describes whether the image is static or dynamic
*@param correlationID -A unique ID that correlates each RPCRequest and RPCResponse
*@throws SdlException
*/
public void addCommand(Integer commandID,
String menuText, Integer position,
Vector<String> vrCommands, String IconValue, ImageType IconType, Integer correlationID)
throws SdlException {
addCommand(commandID, menuText, null, position, vrCommands, IconValue, IconType, correlationID);
}
/**
*Sends an AddCommand RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
*@param commandID -Unique command ID of the command to add.
*@param menuText -Menu text for optional sub value containing menu parameters.
*@param position -Menu position for optional sub value containing menu parameters.
*@param IconValue -A static hex icon value or the binary image file name identifier (sent by the PutFile RPC).
*@param IconType -Describes whether the image is static or dynamic
*@param correlationID -A unique ID that correlates each RPCRequest and RPCResponse
*@throws SdlException
*/
public void addCommand(Integer commandID,
String menuText, Integer position, String IconValue, ImageType IconType,
Integer correlationID)
throws SdlException {
addCommand(commandID, menuText, null, position, null, IconValue, IconType, correlationID);
}
/**
*Sends an AddCommand RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
*@param commandID -Unique command ID of the command to add.
*@param menuText -Menu text for optional sub value containing menu parameters.
*@param IconValue -A static hex icon value or the binary image file name identifier (sent by the PutFile RPC).
*@param IconType -Describes whether the image is static or dynamic
*@param correlationID -A unique ID that correlates each RPCRequest and RPCResponse
*@throws SdlException
*/
public void addCommand(Integer commandID,
String menuText, String IconValue, ImageType IconType, Integer correlationID)
throws SdlException {
addCommand(commandID, menuText, null, null, null, IconValue, IconType, correlationID);
}
/**
* Sends an AddCommand RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
* @param commandID -Unique command ID of the command to add.
* @param menuText -Menu text for optional sub value containing menu parameters.
* @param vrCommands -VR synonyms for this AddCommand.
* @param IconValue -A static hex icon value or the binary image file name identifier (sent by the PutFile RPC).
* @param IconType -Describes whether the image is static or dynamic
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse
* @throws SdlException
*/
public void addCommand(Integer commandID,
String menuText, Vector<String> vrCommands, String IconValue, ImageType IconType, Integer correlationID)
throws SdlException {
addCommand(commandID, menuText, null, null, vrCommands, IconValue, IconType, correlationID);
}
/**
* Sends an AddCommand RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
* @param commandID -Unique command ID of the command to add.
* @param vrCommands -VR synonyms for this AddCommand.
* @param IconValue -A static hex icon value or the binary image file name identifier (sent by the PutFile RPC).
* @param IconType -Describes whether the image is static or dynamic
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse
* @throws SdlException
*/
public void addCommand(Integer commandID,
Vector<String> vrCommands, String IconValue, ImageType IconType, Integer correlationID)
throws SdlException {
addCommand(commandID, null, null, null, vrCommands, IconValue, IconType, correlationID);
}
/*End V1 Enhanced helper*/
/**
*Sends an AddCommand RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
*@param commandID -Unique command ID of the command to add.
*@param menuText -Menu text for optional sub value containing menu parameters.
*@param parentID -Menu parent ID for optional sub value containing menu parameters.
*@param position -Menu position for optional sub value containing menu parameters.
*@param vrCommands -VR synonyms for this AddCommand.
*@param correlationID -A unique ID that correlates each RPCRequest and RPCResponse
*@throws SdlException
*/
public void addCommand(Integer commandID,
String menuText, Integer parentID, Integer position,
Vector<String> vrCommands, Integer correlationID)
throws SdlException {
AddCommand msg = RPCRequestFactory.buildAddCommand(commandID, menuText, parentID, position,
vrCommands, correlationID);
sendRPCRequest(msg);
}
/**
*Sends an AddCommand RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
*@param commandID -Unique command ID of the command to add.
*@param menuText -Menu text for optional sub value containing menu parameters.
*@param position -Menu position for optional sub value containing menu parameters.
*@param vrCommands -VR synonyms for this AddCommand.
*@param correlationID -A unique ID that correlates each RPCRequest and RPCResponse
*@throws SdlException
*/
public void addCommand(Integer commandID,
String menuText, Integer position,
Vector<String> vrCommands, Integer correlationID)
throws SdlException {
addCommand(commandID, menuText, null, position, vrCommands, correlationID);
}
/**
*Sends an AddCommand RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
*@param commandID -Unique command ID of the command to add.
*@param menuText -Menu text for optional sub value containing menu parameters.
*@param position -Menu position for optional sub value containing menu parameters.
*@param correlationID -A unique ID that correlates each RPCRequest and RPCResponse
*@throws SdlException
*/
public void addCommand(Integer commandID,
String menuText, Integer position,
Integer correlationID)
throws SdlException {
addCommand(commandID, menuText, null, position, null, correlationID);
}
/**
*Sends an AddCommand RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
*@param commandID -Unique command ID of the command to add.
*@param menuText -Menu text for optional sub value containing menu parameters.
*@param correlationID -A unique ID that correlates each RPCRequest and RPCResponse
*@throws SdlException
*/
public void addCommand(Integer commandID,
String menuText, Integer correlationID)
throws SdlException {
Vector<String> vrCommands = null;
addCommand(commandID, menuText, null, null, vrCommands, correlationID);
}
/**
* Sends an AddCommand RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
*@param commandID -Unique command ID of the command to add.
*@param menuText -Menu text for optional sub value containing menu parameters.
*@param vrCommands -VR synonyms for this AddCommand.
*@param correlationID -A unique ID that correlates each RPCRequest and RPCResponse
*@throws SdlException
*/
public void addCommand(Integer commandID,
String menuText, Vector<String> vrCommands, Integer correlationID)
throws SdlException {
addCommand(commandID, menuText, null, null, vrCommands, correlationID);
}
/**
* Sends an AddCommand RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
*@param commandID -Unique command ID of the command to add.
*@param vrCommands -VR synonyms for this AddCommand.
*@param correlationID -A unique ID that correlates each RPCRequest and RPCResponse
*@throws SdlException
*/
public void addCommand(Integer commandID,
Vector<String> vrCommands, Integer correlationID)
throws SdlException {
addCommand(commandID, null, null, null, vrCommands, correlationID);
}
/**
* Sends an AddSubMenu RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
* @param menuID -Unique ID of the sub menu to add.
* @param menuName -Text to show in the menu for this sub menu.
* @param position -Position within the items that are at the top level of the in application menu.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse
* @throws SdlException
*/
public void addSubMenu(Integer menuID, String menuName,
Integer position, Integer correlationID)
throws SdlException {
AddSubMenu msg = RPCRequestFactory.buildAddSubMenu(menuID, menuName,
position, correlationID);
sendRPCRequest(msg);
}
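/*
 * Usage sketch (illustrative; the menu ID, label, position and correlation ID are assumptions):
 *
 *     proxy.addSubMenu(10, "Settings", 1, 4001);
 */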
/**
* Sends an AddSubMenu RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
* @param menuID -Unique ID of the sub menu to add.
* @param menuName -Text to show in the menu for this sub menu.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse
* @throws SdlException
*/
public void addSubMenu(Integer menuID, String menuName,
Integer correlationID) throws SdlException {
addSubMenu(menuID, menuName, null, correlationID);
}
/*Begin V1 Enhanced helper*/
/**
* Sends an Alert RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
* @param ttsText -The text to speech message in the form of a string.
* @param alertText1 -The first line of the alert text field.
* @param alertText2 -The second line of the alert text field.
* @param alertText3 -The optional third line of the alert text field.
* @param playTone -Defines if tone should be played.
* @param duration -Timeout in milliseconds.
* @param softButtons -A list of App defined SoftButtons.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse
* @throws SdlException
*/
public void alert(String ttsText, String alertText1,
String alertText2, String alertText3, Boolean playTone, Integer duration, Vector<SoftButton> softButtons,
Integer correlationID) throws SdlException {
Alert msg = RPCRequestFactory.buildAlert(ttsText, alertText1, alertText2, alertText3, playTone, duration, softButtons, correlationID);
sendRPCRequest(msg);
}
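/*
 * Usage sketch (illustrative; button ID, text, duration and correlation ID are assumptions):
 * an alert that speaks, shows two lines of text and offers a single soft button.
 *
 *     SoftButton okButton = new SoftButton();
 *     okButton.setSoftButtonID(1);
 *     okButton.setType(SoftButtonType.SBT_TEXT);
 *     okButton.setText("OK");
 *     Vector<SoftButton> softButtons = new Vector<SoftButton>();
 *     softButtons.add(okButton);
 *     proxy.alert("Low fuel", "Low fuel", "Find a gas station?", null, true, 10000, softButtons, 5001);
 */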
/**
* Sends an Alert RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
* @param ttsChunks -Text/phonemes to speak in the form of ttsChunks.
* @param alertText1 -The first line of the alert text field.
* @param alertText2 -The second line of the alert text field.
* @param alertText3 -The optional third line of the alert text field.
* @param playTone -Defines if tone should be played.
* @param duration -Timeout in milliseconds.
* @param softButtons -A list of App defined SoftButtons.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse
* @throws SdlException
*/
public void alert(Vector<TTSChunk> ttsChunks,
String alertText1, String alertText2, String alertText3, Boolean playTone,
Integer duration, Vector<SoftButton> softButtons, Integer correlationID) throws SdlException {
Alert msg = RPCRequestFactory.buildAlert(ttsChunks, alertText1, alertText2, alertText3, playTone, duration, softButtons, correlationID);
sendRPCRequest(msg);
}
/**
* Sends an Alert RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
* @param ttsText -The text to speech message in the form of a string.
* @param playTone -Defines if tone should be played.
* @param softButtons -A list of App defined SoftButtons.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse
* @throws SdlException
*/
public void alert(String ttsText, Boolean playTone, Vector<SoftButton> softButtons,
Integer correlationID) throws SdlException {
alert(ttsText, null, null, null, playTone, null, softButtons, correlationID);
}
/**
* Sends an Alert RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
* @param chunks -A list of text/phonemes to speak in the form of ttsChunks.
* @param playTone -Defines if tone should be played.
* @param softButtons -A list of App defined SoftButtons.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse
* @throws SdlException
*/
public void alert(Vector<TTSChunk> chunks, Boolean playTone, Vector<SoftButton> softButtons,
Integer correlationID) throws SdlException {
alert(chunks, null, null, null, playTone, null, softButtons, correlationID);
}
/**
* Sends an Alert RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
* @param alertText1 -The first line of the alert text field.
* @param alertText2 -The second line of the alert text field.
* @param alertText3 -The optional third line of the alert text field.
* @param playTone -Defines if tone should be played.
* @param duration -Timeout in milliseconds.
* @param softButtons -A list of App defined SoftButtons.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse
* @throws SdlException
*/
public void alert(String alertText1, String alertText2, String alertText3,
Boolean playTone, Integer duration, Vector<SoftButton> softButtons, Integer correlationID)
throws SdlException {
alert((Vector<TTSChunk>)null, alertText1, alertText2, alertText3, playTone, duration, softButtons, correlationID);
}
/*End V1 Enhanced helper*/
/**
* Sends an Alert RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
* @param ttsText -The text to speech message in the form of a string.
* @param alertText1 -The first line of the alert text field.
* @param alertText2 -The second line of the alert text field.
* @param playTone -Defines if tone should be played.
* @param duration -Timeout in milliseconds.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse
* @throws SdlException
*/
public void alert(String ttsText, String alertText1,
String alertText2, Boolean playTone, Integer duration,
Integer correlationID) throws SdlException {
Alert msg = RPCRequestFactory.buildAlert(ttsText, alertText1, alertText2,
playTone, duration, correlationID);
sendRPCRequest(msg);
}
/**
* Sends an Alert RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
* @param ttsChunks -A list of text/phonemes to speak in the form of ttsChunks.
* @param alertText1 -The first line of the alert text field.
* @param alertText2 -The second line of the alert text field.
* @param playTone -Defines if tone should be played.
* @param duration -Timeout in milliseconds.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse
* @throws SdlException
*/
public void alert(Vector<TTSChunk> ttsChunks,
String alertText1, String alertText2, Boolean playTone,
Integer duration, Integer correlationID) throws SdlException {
Alert msg = RPCRequestFactory.buildAlert(ttsChunks, alertText1, alertText2, playTone,
duration, correlationID);
sendRPCRequest(msg);
}
/**
* Sends an Alert RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
* @param ttsText -The text to speech message in the form of a string.
* @param playTone -Defines if tone should be played.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse
* @throws SdlException
*/
public void alert(String ttsText, Boolean playTone,
Integer correlationID) throws SdlException {
alert(ttsText, null, null, playTone, null, correlationID);
}
/**
* Sends an Alert RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
* @param chunks -A list of text/phonemes to speak in the form of ttsChunks.
* @param playTone -Defines if tone should be played.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse
* @throws SdlException
*/
public void alert(Vector<TTSChunk> chunks, Boolean playTone,
Integer correlationID) throws SdlException {
alert(chunks, null, null, playTone, null, correlationID);
}
/**
* Sends an Alert RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
* @param alertText1 -The first line of the alert text field.
* @param alertText2 -The second line of the alert text field.
* @param playTone -Defines if tone should be played.
* @param duration -Timeout in milliseconds.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse
* @throws SdlException
*/
public void alert(String alertText1, String alertText2,
Boolean playTone, Integer duration, Integer correlationID)
throws SdlException {
alert((Vector<TTSChunk>)null, alertText1, alertText2, playTone, duration, correlationID);
}
/**
* Sends a CreateInteractionChoiceSet RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
* @param choiceSet -The list of choices to include in the interaction choice set.
* @param interactionChoiceSetID -Unique ID identifying the interaction choice set.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse
* @throws SdlException
*/
public void createInteractionChoiceSet(
Vector<Choice> choiceSet, Integer interactionChoiceSetID,
Integer correlationID) throws SdlException {
CreateInteractionChoiceSet msg = RPCRequestFactory.buildCreateInteractionChoiceSet(
choiceSet, interactionChoiceSetID, correlationID);
sendRPCRequest(msg);
}
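/*
 * Usage sketch (illustrative; IDs and labels are assumptions): a choice set must be
 * created before it can be referenced by performInteraction().
 *
 *     Choice yes = new Choice();
 *     yes.setChoiceID(1);
 *     yes.setMenuName("Yes");
 *     Choice no = new Choice();
 *     no.setChoiceID(2);
 *     no.setMenuName("No");
 *     Vector<Choice> choices = new Vector<Choice>();
 *     choices.add(yes);
 *     choices.add(no);
 *     proxy.createInteractionChoiceSet(choices, 300, 6001);
 */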
/**
* Sends a DeleteCommand RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
* @param commandID -ID of the command(s) to delete.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse
* @throws SdlException
*/
public void deleteCommand(Integer commandID,
Integer correlationID) throws SdlException {
DeleteCommand msg = RPCRequestFactory.buildDeleteCommand(commandID, correlationID);
sendRPCRequest(msg);
}
/**
* Sends a DeleteInteractionChoiceSet RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
* @param interactionChoiceSetID -ID of the interaction choice set to delete.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse
* @throws SdlException
*/
public void deleteInteractionChoiceSet(
Integer interactionChoiceSetID, Integer correlationID)
throws SdlException {
DeleteInteractionChoiceSet msg = RPCRequestFactory.buildDeleteInteractionChoiceSet(
interactionChoiceSetID, correlationID);
sendRPCRequest(msg);
}
/**
* Sends a DeleteSubMenu RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
* @param menuID -The menuID of the submenu to delete.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse
* @throws SdlException
*/
public void deleteSubMenu(Integer menuID,
Integer correlationID) throws SdlException {
DeleteSubMenu msg = RPCRequestFactory.buildDeleteSubMenu(menuID, correlationID);
sendRPCRequest(msg);
}
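// Illustrative sketch: the delete helpers above take the IDs used when the items were created,
// e.g. proxy.deleteCommand(100, corrId), proxy.deleteInteractionChoiceSet(10, corrId) and
// proxy.deleteSubMenu(5, corrId), where the numeric IDs are hypothetical app-assigned values.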
/*Begin V1 Enhanced helper*/
/**
* Sends a PerformInteraction RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
* @param initPrompt -Initial prompt spoken to the user at the start of an interaction.
* @param displayText -Text to be displayed first.
* @param interactionChoiceSetID -Interaction choice set IDs to use with an interaction.
* @param vrHelp -Suggested VR Help Items to display on-screen during Perform Interaction.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse.
* @throws SdlException
*/
public void performInteraction(String initPrompt,
String displayText, Integer interactionChoiceSetID, Vector<VrHelpItem> vrHelp,
Integer correlationID) throws SdlException {
PerformInteraction msg = RPCRequestFactory.buildPerformInteraction(initPrompt,
displayText, interactionChoiceSetID, vrHelp, correlationID);
sendRPCRequest(msg);
}
/**
* Sends a PerformInteraction RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
* @param initPrompt -Initial prompt spoken to the user at the start of an interaction.
* @param displayText -Text to be displayed first.
* @param interactionChoiceSetID -Interaction choice set IDs to use with an interaction.
* @param helpPrompt -Help text that is spoken when a user speaks "help" during the interaction.
* @param timeoutPrompt -Timeout text that is spoken when a VR interaction times out.
* @param interactionMode - The method in which the user is notified and uses the interaction (Manual,VR,Both).
* @param timeout -Timeout in milliseconds.
* @param vrHelp -Suggested VR Help Items to display on-screen during Perform Interaction.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse.
* @throws SdlException
*/
public void performInteraction(String initPrompt,
String displayText, Integer interactionChoiceSetID,
String helpPrompt, String timeoutPrompt,
InteractionMode interactionMode, Integer timeout, Vector<VrHelpItem> vrHelp,
Integer correlationID) throws SdlException {
PerformInteraction msg = RPCRequestFactory.buildPerformInteraction(
initPrompt, displayText, interactionChoiceSetID,
helpPrompt, timeoutPrompt, interactionMode,
timeout, vrHelp, correlationID);
sendRPCRequest(msg);
}
/**
* Sends a PerformInteraction RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
* @param initPrompt -Initial prompt spoken to the user at the start of an interaction.
* @param displayText -Text to be displayed first.
* @param interactionChoiceSetIDList -A list of interaction choice set IDs to use with an interaction.
* @param helpPrompt -Help text that is spoken when a user speaks "help" during the interaction.
* @param timeoutPrompt -Timeout text that is spoken when a VR interaction times out.
* @param interactionMode - The method in which the user is notified and uses the interaction (Manual,VR,Both).
* @param timeout -Timeout in milliseconds.
* @param vrHelp -Suggested VR Help Items to display on-screen during Perform Interaction.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse.
* @throws SdlException
*/
public void performInteraction(String initPrompt,
String displayText, Vector<Integer> interactionChoiceSetIDList,
String helpPrompt, String timeoutPrompt,
InteractionMode interactionMode, Integer timeout, Vector<VrHelpItem> vrHelp,
Integer correlationID) throws SdlException {
PerformInteraction msg = RPCRequestFactory.buildPerformInteraction(initPrompt,
displayText, interactionChoiceSetIDList,
helpPrompt, timeoutPrompt, interactionMode, timeout, vrHelp,
correlationID);
sendRPCRequest(msg);
}
/**
* Sends a PerformInteraction RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
* @param initChunks -A list of text/phonemes to speak for the initial prompt in the form of ttsChunks.
* @param displayText -Text to be displayed first.
* @param interactionChoiceSetIDList -A list of interaction choice set IDs to use with an interaction.
* @param helpChunks -A list of text/phonemes to speak for the help text that is spoken when a user speaks "help" during the interaction.
* @param timeoutChunks -A list of text/phonemes to speak for the timeout text that is spoken when a VR interaction times out.
* @param interactionMode - The method in which the user is notified and uses the interaction (Manual,VR,Both).
* @param timeout -Timeout in milliseconds.
* @param vrHelp -Suggested VR Help Items to display on-screen during Perform Interaction.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse.
* @throws SdlException
*/
public void performInteraction(
Vector<TTSChunk> initChunks, String displayText,
Vector<Integer> interactionChoiceSetIDList,
Vector<TTSChunk> helpChunks, Vector<TTSChunk> timeoutChunks,
InteractionMode interactionMode, Integer timeout, Vector<VrHelpItem> vrHelp,
Integer correlationID) throws SdlException {
PerformInteraction msg = RPCRequestFactory.buildPerformInteraction(
initChunks, displayText, interactionChoiceSetIDList,
helpChunks, timeoutChunks, interactionMode, timeout,vrHelp,
correlationID);
sendRPCRequest(msg);
}
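// Illustrative sketch of the V1 Enhanced form ("proxy", "corrId" and the choice set ID 10 are
// hypothetical; vrHelp may be passed as null when no custom VR help items are supplied):
//   Vector<Integer> sets = new Vector<Integer>();
//   sets.add(10);   // ID of a previously created interaction choice set
//   proxy.performInteraction("Pick a category", "Categories", sets,
//       "Say sports or news", "Still there?", InteractionMode.BOTH, 10000, null, corrId);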
/*End V1 Enhanced*/
/**
* Sends a PerformInteraction RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
* @param initPrompt -Initial prompt spoken to the user at the start of an interaction.
* @param displayText -Text to be displayed first.
* @param interactionChoiceSetID -Interaction choice set IDs to use with an interaction.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse.
* @throws SdlException
*/
public void performInteraction(String initPrompt,
String displayText, Integer interactionChoiceSetID,
Integer correlationID) throws SdlException {
PerformInteraction msg = RPCRequestFactory.buildPerformInteraction(initPrompt,
displayText, interactionChoiceSetID, correlationID);
sendRPCRequest(msg);
}
/**
* Sends a PerformInteraction RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
* @param initPrompt -Initial prompt spoken to the user at the start of an interaction.
* @param displayText -Text to be displayed first.
* @param interactionChoiceSetID -Interaction choice set IDs to use with an interaction.
* @param helpPrompt -Help text that is spoken when a user speaks "help" during the interaction.
* @param timeoutPrompt -Timeout text that is spoken when a VR interaction times out.
* @param interactionMode - The method in which the user is notified and uses the interaction (Manual,VR,Both).
* @param timeout -Timeout in milliseconds.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse.
* @throws SdlException
*/
public void performInteraction(String initPrompt,
String displayText, Integer interactionChoiceSetID,
String helpPrompt, String timeoutPrompt,
InteractionMode interactionMode, Integer timeout,
Integer correlationID) throws SdlException {
PerformInteraction msg = RPCRequestFactory.buildPerformInteraction(
initPrompt, displayText, interactionChoiceSetID,
helpPrompt, timeoutPrompt, interactionMode,
timeout, correlationID);
sendRPCRequest(msg);
}
/**
* Sends a PerformInteraction RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
* @param initPrompt -Initial prompt spoken to the user at the start of an interaction.
* @param displayText -Text to be displayed first.
* @param interactionChoiceSetIDList -A list of interaction choice set IDs to use with an interaction.
* @param helpPrompt -Help text that is spoken when a user speaks "help" during the interaction.
* @param timeoutPrompt -Timeout text that is spoken when a VR interaction times out.
* @param interactionMode - The method in which the user is notified and uses the interaction (Manual,VR,Both).
* @param timeout -Timeout in milliseconds.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse.
* @throws SdlException
*/
public void performInteraction(String initPrompt,
String displayText, Vector<Integer> interactionChoiceSetIDList,
String helpPrompt, String timeoutPrompt,
InteractionMode interactionMode, Integer timeout,
Integer correlationID) throws SdlException {
PerformInteraction msg = RPCRequestFactory.buildPerformInteraction(initPrompt,
displayText, interactionChoiceSetIDList,
helpPrompt, timeoutPrompt, interactionMode, timeout,
correlationID);
sendRPCRequest(msg);
}
/**
* Sends a PerformInteraction RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
* @param initChunks -A list of text/phonemes to speak for the initial prompt in the form of ttsChunks.
* @param displayText -Text to be displayed first.
* @param interactionChoiceSetIDList -A list of interaction choice set IDs to use with an interaction.
* @param helpChunks -A list of text/phonemes to speak for the help text that is spoken when a user speaks "help" during the interaction.
* @param timeoutChunks -A list of text/phonemes to speak for the timeout text that is spoken when a VR interaction times out.
* @param interactionMode - The method in which the user is notified and uses the interaction (Manual,VR,Both).
* @param timeout -Timeout in milliseconds.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse.
* @throws SdlException
*/
public void performInteraction(
Vector<TTSChunk> initChunks, String displayText,
Vector<Integer> interactionChoiceSetIDList,
Vector<TTSChunk> helpChunks, Vector<TTSChunk> timeoutChunks,
InteractionMode interactionMode, Integer timeout,
Integer correlationID) throws SdlException {
PerformInteraction msg = RPCRequestFactory.buildPerformInteraction(
initChunks, displayText, interactionChoiceSetIDList,
helpChunks, timeoutChunks, interactionMode, timeout,
correlationID);
sendRPCRequest(msg);
}
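// Illustrative sketch: the simpler overloads omit the VR help items, e.g.
//   proxy.performInteraction("Pick one", "Categories", 10, corrId);
// where 10 is the ID of a previously created interaction choice set (hypothetical value).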
// Protected registerAppInterface used to ensure only non-ALM applications call
// registerAppInterface
protected void registerAppInterfacePrivate(
SdlMsgVersion sdlMsgVersion, String appName, Vector<TTSChunk> ttsName,
String ngnMediaScreenAppName, Vector<String> vrSynonyms, Boolean isMediaApp,
Language languageDesired, Language hmiDisplayLanguageDesired, Vector<AppHMIType> appType,
String appID, String autoActivateID, Integer correlationID)
throws SdlException {
RegisterAppInterface msg = RPCRequestFactory.buildRegisterAppInterface(
sdlMsgVersion, appName, ttsName, ngnMediaScreenAppName, vrSynonyms, isMediaApp,
languageDesired, hmiDisplayLanguageDesired, appType, appID, correlationID);
if (_bAppResumeEnabled)
{
if (_lastHashID != null)
msg.setHashID(_lastHashID);
}
Intent sendIntent = createBroadcastIntent();
updateBroadcastIntent(sendIntent, "RPC_NAME", FunctionID.REGISTER_APP_INTERFACE);
updateBroadcastIntent(sendIntent, "TYPE", RPCMessage.KEY_REQUEST);
updateBroadcastIntent(sendIntent, "CORRID", msg.getCorrelationID());
updateBroadcastIntent(sendIntent, "DATA",serializeJSON(msg));
sendBroadcastIntent(sendIntent);
sendRPCRequestPrivate(msg);
}
/*Begin V1 Enhanced helper function*/
/**
* Sends a SetGlobalProperties RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
* @param helpPrompt
* @param timeoutPrompt
* @param vrHelpTitle
* @param vrHelp
* @param correlationID
* @throws SdlException
*/
public void setGlobalProperties(
String helpPrompt, String timeoutPrompt, String vrHelpTitle, Vector<VrHelpItem> vrHelp, Integer correlationID)
throws SdlException {
SetGlobalProperties req = RPCRequestFactory.buildSetGlobalProperties(helpPrompt,
timeoutPrompt, vrHelpTitle, vrHelp, correlationID);
sendRPCRequest(req);
}
/**
* Sends a SetGlobalProperties RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
* @param helpChunks
* @param timeoutChunks
* @param vrHelpTitle
* @param vrHelp
* @param correlationID
* @throws SdlException
*/
public void setGlobalProperties(
Vector<TTSChunk> helpChunks, Vector<TTSChunk> timeoutChunks, String vrHelpTitle, Vector<VrHelpItem> vrHelp,
Integer correlationID) throws SdlException {
SetGlobalProperties req = RPCRequestFactory.buildSetGlobalProperties(
helpChunks, timeoutChunks, vrHelpTitle, vrHelp, correlationID);
sendRPCRequest(req);
}
/*End V1 Enhanced helper function*/
/**
* Sends a SetGlobalProperties RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
* @param helpPrompt
* @param timeoutPrompt
* @param correlationID
* @throws SdlException
*/
public void setGlobalProperties(
String helpPrompt, String timeoutPrompt, Integer correlationID)
throws SdlException {
SetGlobalProperties req = RPCRequestFactory.buildSetGlobalProperties(helpPrompt,
timeoutPrompt, correlationID);
sendRPCRequest(req);
}
/**
* Sends a SetGlobalProperties RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
* @param helpChunks
* @param timeoutChunks
* @param correlationID
* @throws SdlException
*/
public void setGlobalProperties(
Vector<TTSChunk> helpChunks, Vector<TTSChunk> timeoutChunks,
Integer correlationID) throws SdlException {
SetGlobalProperties req = RPCRequestFactory.buildSetGlobalProperties(
helpChunks, timeoutChunks, correlationID);
sendRPCRequest(req);
}
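// Illustrative sketch ("proxy" and "corrId" are hypothetical):
//   proxy.setGlobalProperties("You can say sports or news", "Please respond", corrId);
// resetGlobalProperties(...) below restores selected properties to their default values.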
public void resetGlobalProperties(Vector<GlobalProperty> properties,
Integer correlationID) throws SdlException {
ResetGlobalProperties req = new ResetGlobalProperties();
req.setCorrelationID(correlationID);
req.setProperties(properties);
sendRPCRequest(req);
}
/**
* Sends a SetMediaClockTimer RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
* @param hours
* @param minutes
* @param seconds
* @param updateMode
* @param correlationID
* @throws SdlException
*/
public void setMediaClockTimer(Integer hours,
Integer minutes, Integer seconds, UpdateMode updateMode,
Integer correlationID) throws SdlException {
SetMediaClockTimer msg = RPCRequestFactory.buildSetMediaClockTimer(hours,
minutes, seconds, updateMode, correlationID);
sendRPCRequest(msg);
}
/**
* Pauses the media clock. Responses are captured through callback on IProxyListener.
*
* @param correlationID
* @throws SdlException
*/
public void pauseMediaClockTimer(Integer correlationID)
throws SdlException {
SetMediaClockTimer msg = RPCRequestFactory.buildSetMediaClockTimer(0,
0, 0, UpdateMode.PAUSE, correlationID);
sendRPCRequest(msg);
}
/**
* Resumes the media clock. Responses are captured through callback on IProxyListener.
*
* @param correlationID
* @throws SdlException
*/
public void resumeMediaClockTimer(Integer correlationID)
throws SdlException {
SetMediaClockTimer msg = RPCRequestFactory.buildSetMediaClockTimer(0,
0, 0, UpdateMode.RESUME, correlationID);
sendRPCRequest(msg);
}
/**
* Clears the media clock. Responses are captured through callback on IProxyListener.
*
* @param correlationID
* @throws SdlException
*/
public void clearMediaClockTimer(Integer correlationID)
throws SdlException {
Show msg = RPCRequestFactory.buildShow(null, null, null, " ", null, null, correlationID);
sendRPCRequest(msg);
}
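// Illustrative sketch: a count-up media clock started at 0:00:00 and later paused/resumed
// (UpdateMode.COUNTUP is assumed to be available alongside the PAUSE/RESUME values used above):
//   proxy.setMediaClockTimer(0, 0, 0, UpdateMode.COUNTUP, corrId);
//   proxy.pauseMediaClockTimer(corrId);
//   proxy.resumeMediaClockTimer(corrId);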
/*Begin V1 Enhanced helper*/
/**
* Sends a Show RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
* @param mainText1 -Text displayed in a single or upper display line.
* @param mainText2 -Text displayed on the second display line.
* @param mainText3 -Text displayed on the second "page" first display line.
* @param mainText4 -Text displayed on the second "page" second display line.
* @param statusBar
* @param mediaClock -Text value for MediaClock field.
* @param mediaTrack -Text displayed in the track field.
* @param graphic -Image struct specifying the static or dynamic image to display in the app.
* @param softButtons -App defined SoftButtons.
* @param customPresets -App labeled on-screen presets.
* @param alignment -Specifies how the mainText1 and mainText2 text should be aligned on the display.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse.
* @throws SdlException
*/
public void show(String mainText1, String mainText2, String mainText3, String mainText4,
String statusBar, String mediaClock, String mediaTrack,
Image graphic, Vector<SoftButton> softButtons, Vector <String> customPresets,
TextAlignment alignment, Integer correlationID)
throws SdlException {
Show msg = RPCRequestFactory.buildShow(mainText1, mainText2, mainText3, mainText4,
statusBar, mediaClock, mediaTrack, graphic, softButtons, customPresets,
alignment, correlationID);
sendRPCRequest(msg);
}
/**
* Sends a Show RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
* @param mainText1 -Text displayed in a single or upper display line.
* @param mainText2 -Text displayed on the second display line.
* @param mainText3 -Text displayed on the second "page" first display line.
* @param mainText4 -Text displayed on the second "page" second display line.
* @param graphic -Image struct specifying the static or dynamic image to display in the app.
* @param softButtons -App defined SoftButtons.
* @param customPresets -App labeled on-screen presets.
* @param alignment -Specifies how the mainText1 and mainText2 text should be aligned on the display.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse.
* @throws SdlException
*/
public void show(String mainText1, String mainText2, String mainText3, String mainText4,
Image graphic, Vector<SoftButton> softButtons, Vector <String> customPresets,
TextAlignment alignment, Integer correlationID)
throws SdlException {
show(mainText1, mainText2, mainText3, mainText4, null, null, null, graphic, softButtons, customPresets, alignment, correlationID);
}
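// Illustrative sketch (nulls are passed for the optional graphic, soft buttons and presets;
// TextAlignment.CENTERED is assumed to be a valid enumeration value):
//   proxy.show("Line 1", "Line 2", "Line 3", "Line 4",
//       null, null, null, TextAlignment.CENTERED, corrId);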
/*End V1 Enhanced helper*/
/**
* Sends a Show RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
* @param mainText1 -Text displayed in a single or upper display line.
* @param mainText2 -Text displayed on the second display line.
* @param statusBar
* @param mediaClock -Text value for MediaClock field.
* @param mediaTrack -Text displayed in the track field.
* @param alignment -Specifies how the mainText1 and mainText2 text should be aligned on the display.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse.
* @throws SdlException
*/
public void show(String mainText1, String mainText2,
String statusBar, String mediaClock, String mediaTrack,
TextAlignment alignment, Integer correlationID)
throws SdlException {
Show msg = RPCRequestFactory.buildShow(mainText1, mainText2,
statusBar, mediaClock, mediaTrack,
alignment, correlationID);
sendRPCRequest(msg);
}
/**
* Sends a Show RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
* @param mainText1 -Text displayed in a single or upper display line.
* @param mainText2 -Text displayed on the second display line.
* @param alignment -Specifies how the mainText1 and mainText2 text should be aligned on the display.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse.
* @throws SdlException
*/
public void show(String mainText1, String mainText2,
TextAlignment alignment, Integer correlationID)
throws SdlException {
show(mainText1, mainText2, null, null, null, alignment, correlationID);
}
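// Illustrative sketch: the two-line overload above only needs the text and alignment, e.g.
//   proxy.show("Now Playing", "Track 3", TextAlignment.CENTERED, corrId);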
/**
* Sends a Speak RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
* @param ttsText -The text-to-speech message in the form of a string.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse.
* @throws SdlException
*/
public void speak(String ttsText, Integer correlationID)
throws SdlException {
Speak msg = RPCRequestFactory.buildSpeak(TTSChunkFactory.createSimpleTTSChunks(ttsText),
correlationID);
sendRPCRequest(msg);
}
/**
* Sends a Speak RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
* @param ttsChunks -Text/phonemes to speak in the form of ttsChunks.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse.
* @throws SdlException
*/
public void speak(Vector<TTSChunk> ttsChunks,
Integer correlationID) throws SdlException {
Speak msg = RPCRequestFactory.buildSpeak(ttsChunks, correlationID);
sendRPCRequest(msg);
}
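// Illustrative sketch:
//   proxy.speak("Welcome to the demo app", corrId);
// or, for pre-built chunks:
//   proxy.speak(TTSChunkFactory.createSimpleTTSChunks("Welcome"), corrId);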
/**
* Sends a SubscribeButton RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
* @param buttonName -Name of the button to subscribe.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse.
* @throws SdlException
*/
public void subscribeButton(ButtonName buttonName,
Integer correlationID) throws SdlException {
SubscribeButton msg = RPCRequestFactory.buildSubscribeButton(buttonName,
correlationID);
sendRPCRequest(msg);
}
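// Illustrative sketch (ButtonName.OK is assumed to be among the enumeration values):
//   proxy.subscribeButton(ButtonName.OK, corrId);
// Subsequent presses of the subscribed button arrive via the button notifications on IProxyListener.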
// Protected unregisterAppInterface used to ensure no non-ALM app calls
// unregisterAppInterface.
protected void unregisterAppInterfacePrivate(Integer correlationID)
throws SdlException {
UnregisterAppInterface msg =
RPCRequestFactory.buildUnregisterAppInterface(correlationID);
Intent sendIntent = createBroadcastIntent();
updateBroadcastIntent(sendIntent, "RPC_NAME", FunctionID.UNREGISTER_APP_INTERFACE);
updateBroadcastIntent(sendIntent, "TYPE", RPCMessage.KEY_REQUEST);
updateBroadcastIntent(sendIntent, "CORRID", msg.getCorrelationID());
updateBroadcastIntent(sendIntent, "DATA",serializeJSON(msg));
sendBroadcastIntent(sendIntent);
sendRPCRequestPrivate(msg);
}
/**
* Sends an UnsubscribeButton RPCRequest to SDL. Responses are captured through callback on IProxyListener.
*
* @param buttonName -Name of the button to unsubscribe.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse.
* @throws SdlException
*/
public void unsubscribeButton(ButtonName buttonName,
Integer correlationID) throws SdlException {
UnsubscribeButton msg = RPCRequestFactory.buildUnsubscribeButton(
buttonName, correlationID);
sendRPCRequest(msg);
}
/**
* Creates a choice to be added to a choice set. A choice has both a voice and a visual menu component.
*
* @param choiceID -Unique ID used to identify this choice (returned in callback).
* @param choiceMenuName -Text name displayed for this choice.
* @param choiceVrCommands -Vector of vrCommands used to select this choice by voice. Must contain
* at least one non-empty element.
* @return The created Choice.
*/
public Choice createChoiceSetChoice(Integer choiceID, String choiceMenuName,
Vector<String> choiceVrCommands) {
Choice returnChoice = new Choice();
returnChoice.setChoiceID(choiceID);
returnChoice.setMenuName(choiceMenuName);
returnChoice.setVrCommands(choiceVrCommands);
return returnChoice;
}
/**
* Starts an audio pass thru session. Responses are captured through callback on IProxyListener.
*
* @param initialPrompt -SDL will speak this prompt before opening the audio pass thru session.
* @param audioPassThruDisplayText1 -First line of text displayed during audio capture.
* @param audioPassThruDisplayText2 -Second line of text displayed during audio capture.
* @param samplingRate -The sampling rate of the recording; allowable values are 8, 16, 22, or 44 kHz.
* @param maxDuration -The maximum duration of audio recording in milliseconds.
* @param bitsPerSample -Specifies the quality at which the audio is recorded; currently 8-bit or 16-bit.
* @param audioType -Specifies the type of audio data being requested.
* @param muteAudio -Defines if the current audio source should be muted during the APT session.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse.
* @throws SdlException
*/
public void performaudiopassthru(String initialPrompt, String audioPassThruDisplayText1, String audioPassThruDisplayText2,
SamplingRate samplingRate, Integer maxDuration, BitsPerSample bitsPerSample,
AudioType audioType, Boolean muteAudio, Integer correlationID) throws SdlException {
PerformAudioPassThru msg = RPCRequestFactory.BuildPerformAudioPassThru(initialPrompt, audioPassThruDisplayText1, audioPassThruDisplayText2,
samplingRate, maxDuration, bitsPerSample, audioType, muteAudio, correlationID);
sendRPCRequest(msg);
}
/**
* Ends audio pass thru session. Responses are captured through callback on IProxyListener.
*
* @param correlationID
* @throws SdlException
*/
public void endaudiopassthru(Integer correlationID) throws SdlException
{
EndAudioPassThru msg = RPCRequestFactory.BuildEndAudioPassThru(correlationID);
sendRPCRequest(msg);
}
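// Illustrative sketch (the SamplingRate/BitsPerSample/AudioType constants shown are assumptions
// about the enumeration value names; "proxy" and "corrId" are hypothetical as above):
//   proxy.performaudiopassthru("Speak now", "Listening", null,
//       SamplingRate._16KHZ, 10000, BitsPerSample._16_BIT, AudioType.PCM, false, corrId);
//   proxy.endaudiopassthru(corrId);   // ends the capture before the maxDuration elapses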
/**
* Subscribes for specific published data items. The data will only be sent when it has changed.
* Responses are captured through callback on IProxyListener.
*
* @param gps -Subscribes to GPS data.
* @param speed -Subscribes to vehicle speed data in kilometers per hour.
* @param rpm -Subscribes to number of revolutions per minute of the engine.
* @param fuelLevel -Subscribes to fuel level in the tank (percentage).
* @param fuelLevel_State -Subscribes to fuel level state.
* @param instantFuelConsumption -Subscribes to instantaneous fuel consumption in microlitres.
* @param externalTemperature -Subscribes to the external temperature in degrees celsius.
* @param prndl -Subscribes to PRNDL data that houses the selected gear.
* @param tirePressure -Subscribes to the TireStatus data containing status and pressure of tires.
* @param odometer -Subscribes to Odometer data in km.
* @param beltStatus -Subscribes to status of the seat belts.
* @param bodyInformation -Subscribes to body information including power modes.
* @param deviceStatus -Subscribes to device status including signal and battery strength.
* @param driverBraking -Subscribes to the status of the brake pedal.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse.
* @throws SdlException
*/
public void subscribevehicledata(boolean gps, boolean speed, boolean rpm, boolean fuelLevel, boolean fuelLevel_State,
boolean instantFuelConsumption, boolean externalTemperature, boolean prndl, boolean tirePressure,
boolean odometer, boolean beltStatus, boolean bodyInformation, boolean deviceStatus,
boolean driverBraking, Integer correlationID) throws SdlException
{
SubscribeVehicleData msg = RPCRequestFactory.BuildSubscribeVehicleData(gps, speed, rpm, fuelLevel, fuelLevel_State, instantFuelConsumption, externalTemperature, prndl, tirePressure,
odometer, beltStatus, bodyInformation, deviceStatus, driverBraking, correlationID);
sendRPCRequest(msg);
}
/**
* Unsubscribes from specific published data items.
* Responses are captured through callback on IProxyListener.
*
* @param gps -Unsubscribes from GPS data.
* @param speed -Unsubscribes from vehicle speed data in kilometers per hour.
* @param rpm -Unsubscribes from the number of revolutions per minute of the engine.
* @param fuelLevel -Unsubscribes from fuel level in the tank (percentage).
* @param fuelLevel_State -Unsubscribes from fuel level state.
* @param instantFuelConsumption -Unsubscribes from instantaneous fuel consumption in microlitres.
* @param externalTemperature -Unsubscribes from the external temperature in degrees celsius.
* @param prndl -Unsubscribes from PRNDL data that houses the selected gear.
* @param tirePressure -Unsubscribes from the TireStatus data containing status and pressure of tires.
* @param odometer -Unsubscribes from Odometer data in km.
* @param beltStatus -Unsubscribes from the status of the seat belts.
* @param bodyInformation -Unsubscribes from body information including power modes.
* @param deviceStatus -Unsubscribes from device status including signal and battery strength.
* @param driverBraking -Unsubscribes from the status of the brake pedal.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse.
* @throws SdlException
*/
public void unsubscribevehicledata(boolean gps, boolean speed, boolean rpm, boolean fuelLevel, boolean fuelLevel_State,
boolean instantFuelConsumption, boolean externalTemperature, boolean prndl, boolean tirePressure,
boolean odometer, boolean beltStatus, boolean bodyInformation, boolean deviceStatus,
boolean driverBraking, Integer correlationID) throws SdlException
{
UnsubscribeVehicleData msg = RPCRequestFactory.BuildUnsubscribeVehicleData(gps, speed, rpm, fuelLevel, fuelLevel_State, instantFuelConsumption, externalTemperature, prndl, tirePressure,
odometer, beltStatus, bodyInformation, deviceStatus, driverBraking, correlationID);
sendRPCRequest(msg);
}
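// Illustrative sketch: each boolean toggles one data item; here only speed and odometer are
// subscribed and everything else is left off ("proxy" and "corrId" are hypothetical):
//   proxy.subscribevehicledata(false, true, false, false, false, false, false, false, false,
//       true, false, false, false, false, corrId);
// The matching unsubscribevehicledata(...) call takes the same flags to stop the updates.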
/**
* Performs a non-periodic vehicle data read request.
* Responses are captured through callback on IProxyListener.
*
* @param gps -Performs an ad-hoc request for GPS data.
* @param speed -Performs an ad-hoc request for vehicle speed data in kilometers per hour.
* @param rpm -Performs an ad-hoc request for number of revolutions per minute of the engine.
* @param fuelLevel -Performs an ad-hoc request for fuel level in the tank (percentage).
* @param fuelLevel_State -Performs an ad-hoc request for fuel level state.
* @param instantFuelConsumption -Performs an ad-hoc request for instantaneous fuel consumption in microlitres.
* @param externalTemperature -Performs an ad-hoc request for the external temperature in degrees celsius.
* @param vin -Performs an ad-hoc request for the vehicle identification number (VIN).
* @param prndl -Performs an ad-hoc request for PRNDL data that houses the selected gear.
* @param tirePressure -Performs an ad-hoc request for the TireStatus data containing status and pressure of tires.
* @param odometer -Performs an ad-hoc request for Odometer data in km.
* @param beltStatus -Performs an ad-hoc request for status of the seat belts.
* @param bodyInformation -Performs an ad-hoc request for body information including power modes.
* @param deviceStatus -Performs an ad-hoc request for device status including signal and battery strength.
* @param driverBraking -Performs an ad-hoc request for the status of the brake pedal.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse.
* @throws SdlException
*/
public void getvehicledata(boolean gps, boolean speed, boolean rpm, boolean fuelLevel, boolean fuelLevel_State,
boolean instantFuelConsumption, boolean externalTemperature, boolean vin, boolean prndl, boolean tirePressure,
boolean odometer, boolean beltStatus, boolean bodyInformation, boolean deviceStatus,
boolean driverBraking, Integer correlationID) throws SdlException
{
GetVehicleData msg = RPCRequestFactory.BuildGetVehicleData(gps, speed, rpm, fuelLevel, fuelLevel_State, instantFuelConsumption, externalTemperature, vin, prndl, tirePressure, odometer,
beltStatus, bodyInformation, deviceStatus, driverBraking, correlationID);
sendRPCRequest(msg);
}
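// Illustrative sketch: a one-shot read of just the VIN and odometer (note the extra vin flag
// in this signature compared to the subscribe/unsubscribe methods above):
//   proxy.getvehicledata(false, false, false, false, false, false, false, true, false, false,
//       true, false, false, false, false, corrId);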
/**
* Creates a full screen overlay containing a large block of formatted text that can be scrolled with up to 8 SoftButtons defined.
* Responses are captured through callback on IProxyListener.
*
* @param scrollableMessageBody -Body of text that can include newlines and tabs.
* @param timeout -App defined timeout, measured from the last user action.
* @param softButtons -App defined SoftButtons.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse.
* @throws SdlException
*/
public void scrollablemessage(String scrollableMessageBody, Integer timeout, Vector<SoftButton> softButtons, Integer correlationID) throws SdlException
{
ScrollableMessage msg = RPCRequestFactory.BuildScrollableMessage(scrollableMessageBody, timeout, softButtons, correlationID);
sendRPCRequest(msg);
}
/**
* Creates a full screen or pop-up overlay (depending on platform) with a single user controlled slider.
* Responses are captured through callback on IProxyListener.
*
* @param numTicks -Number of selectable items on a horizontal axis.
* @param position -Initial position of slider control (cannot exceed numTicks).
* @param sliderHeader -Text header to display.
* @param sliderFooter - Text footer to display (meant to display min/max threshold descriptors).
* @param timeout -App defined timeout, measured from the last user action.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse.
* @throws SdlException
*/
public void slider(Integer numTicks, Integer position, String sliderHeader, Vector<String> sliderFooter, Integer timeout, Integer correlationID) throws SdlException
{
Slider msg = RPCRequestFactory.BuildSlider(numTicks, position, sliderHeader, sliderFooter, timeout, correlationID);
sendRPCRequest(msg);
}
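// Illustrative sketch (soft buttons and the slider footer are passed as null here, which is
// assumed to be acceptable to the request factory; all other values are hypothetical):
//   proxy.scrollablemessage("Long body text", 30000, null, corrId);
//   proxy.slider(10, 5, "Volume", null, 10000, corrId);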
/**
* Sends a ChangeRegistration RPCRequest to SDL to change the language and HMI display language
* for the registered app. Responses are captured through callback on IProxyListener.
*
* @param language -The new language to register for VR and TTS.
* @param hmiDisplayLanguage -The new language for the HMI display.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse.
* @throws SdlException
*/
public void changeregistration(Language language, Language hmiDisplayLanguage, Integer correlationID) throws SdlException
{
ChangeRegistration msg = RPCRequestFactory.BuildChangeRegistration(language, hmiDisplayLanguage, correlationID);
sendRPCRequest(msg);
}
/**
* Used to push a binary stream of file data onto the module from a mobile device.
* Responses are captured through callback on IProxyListener.
*
* @param is - The input stream of byte data that PutFileStream will read from
* @param sdlFileName - The file reference name used by the putFile RPC.
* @param iOffset - The data offset in bytes, a value of zero is used to indicate data starting from the beginning of the file.
* A value greater than zero is used for resuming partial data chunks.
* @param iLength - The total length of the file being sent.
* @throws SdlException
*/
public void PutFileStream(InputStream is, String sdlFileName, Integer iOffset, Integer iLength) throws SdlException
{
PutFile msg = RPCRequestFactory.buildPutFile(sdlFileName, iOffset, iLength);
startRPCStream(is, msg);
}
/**
* Used to push a binary stream of file data onto the module from a mobile device.
* Responses are captured through callback on IProxyListener.
*
* @param sdlFileName - The file reference name used by the putFile RPC.
* @param iOffset - The data offset in bytes, a value of zero is used to indicate data starting from the beginning of a file.
* A value greater than zero is used for resuming partial data chunks.
* @param iLength - The total length of the file being sent.
*
* @return OutputStream - The output stream of byte data that is written to by the app developer
* @throws SdlException
*/
public OutputStream PutFileStream(String sdlFileName, Integer iOffset, Integer iLength) throws SdlException
{
PutFile msg = RPCRequestFactory.buildPutFile(sdlFileName, iOffset, iLength);
return startRPCStream(msg);
}
/**
* Used to push a binary stream of file data onto the module from a mobile device.
* Responses are captured through callback on IProxyListener.
*
* @param is - The input stream of byte data that PutFileStream will read from
* @param syncFileName - The file reference name used by the putFile RPC.
* @param iOffset - The data offset in bytes, a value of zero is used to indicate data starting from the beginning of the file.
* A value greater than zero is used for resuming partial data chunks.
* @param iLength - The total length of the file being sent.
* @param fileType - The selected file type -- see the FileType enumeration for details
* @param bPersistentFile - Indicates if the file is meant to persist between sessions / ignition cycles.
* @param bSystemFile - Indicates if the file is meant to be passed thru core to elsewhere on the system.
* @throws SdlException
*/
public void PutFileStream(InputStream is, String syncFileName, Integer iOffset, Integer iLength, FileType fileType, Boolean bPersistentFile, Boolean bSystemFile) throws SdlException
{
PutFile msg = RPCRequestFactory.buildPutFile(syncFileName, iOffset, iLength, fileType, bPersistentFile, bSystemFile);
startRPCStream(is, msg);
}
/**
* Used to push a binary stream of file data onto the module from a mobile device.
* Responses are captured through callback on IProxyListener.
*
* @param syncFileName - The file reference name used by the putFile RPC.
* @param iOffset - The data offset in bytes, a value of zero is used to indicate data starting from the beginning of a file.
* A value greater than zero is used for resuming partial data chunks.
* @param iLength - The total length of the file being sent.
* @param fileType - The selected file type -- see the FileType enumeration for details
* @param bPersistentFile - Indicates if the file is meant to persist between sessions / ignition cycles.
* @param bSystemFile - Indicates if the file is meant to be passed thru core to elsewhere on the system.
* @return OutputStream - The output stream of byte data that is written to by the app developer
* @throws SdlException
*/
public OutputStream PutFileStream(String syncFileName, Integer iOffset, Integer iLength, FileType fileType, Boolean bPersistentFile, Boolean bSystemFile) throws SdlException
{
PutFile msg = RPCRequestFactory.buildPutFile(syncFileName, iOffset, iLength, fileType, bPersistentFile, bSystemFile);
return startRPCStream(msg);
}
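// Illustrative sketch (the file name, stream and length are hypothetical):
//   proxy.PutFileStream(inputStream, "icon.png", 0, fileLength);
//   proxy.endPutFileStream();   // defined just below; closes out the stream
// The OutputStream overloads above instead hand back a stream for the app to write the bytes into.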
/**
*
* Used to end an existing PutFileStream that was previously initiated with any PutFileStream method.
*
*/
public void endPutFileStream()
{
endRPCStream();
}
/**
* Used to push binary data onto the SDL module from a mobile device, such as icons and album art. Not supported on first generation SDL vehicles.
* Responses are captured through callback on IProxyListener.
*
* @param sdlFileName -File reference name.
* @param fileType -Selected file type.
* @param persistentFile -Indicates if the file is meant to persist between sessions / ignition cycles.
* @param fileData
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse.
* @throws SdlException
*/
public void putfile(String sdlFileName, FileType fileType, Boolean persistentFile, byte[] fileData, Integer correlationID) throws SdlException
{
PutFile msg = RPCRequestFactory.buildPutFile(sdlFileName, fileType, persistentFile, fileData, correlationID);
sendRPCRequest(msg);
}
/**
* Used to delete a file resident on the SDL module in the app's local cache. Not supported on first generation SDL vehicles.
* Responses are captured through callback on IProxyListener.
*
* @param sdlFileName -File reference name.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse.
* @throws SdlException
*/
public void deletefile(String sdlFileName, Integer correlationID) throws SdlException
{
DeleteFile msg = RPCRequestFactory.buildDeleteFile(sdlFileName, correlationID);
sendRPCRequest(msg);
}
/**
* Requests the current list of resident filenames for the registered app. Not supported on first generation SDL vehicles.
* Responses are captured through callback on IProxyListener.
*
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse.
* @throws SdlException
*/
public void listfiles(Integer correlationID) throws SdlException
{
ListFiles msg = RPCRequestFactory.buildListFiles(correlationID);
sendRPCRequest(msg);
}
/**
* Used to set an existing local file on SDL as the app's icon. Not supported on first generation SDL vehicles.
* Responses are captured through callback on IProxyListener.
*
* @param sdlFileName -File reference name.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse.
* @throws SdlException
*/
public void setappicon(String sdlFileName, Integer correlationID) throws SdlException
{
SetAppIcon msg = RPCRequestFactory.buildSetAppIcon(sdlFileName, correlationID);
sendRPCRequest(msg);
}
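// Illustrative sketch: upload an icon with putfile(...) above and then activate it with
// setappicon(...) (FileType.GRAPHIC_PNG is assumed to be a valid enumeration value, and
// iconBytes is a hypothetical byte[] holding the PNG data):
//   proxy.putfile("icon.png", FileType.GRAPHIC_PNG, true, iconBytes, corrId);
//   proxy.setappicon("icon.png", corrId);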
/**
* Sets an alternate display layout. If not sent, the default screen for the given platform will be shown.
* Responses are captured through callback on IProxyListener.
*
* @param displayLayout -Predefined or dynamically created screen layout.
* @param correlationID -A unique ID that correlates each RPCRequest and RPCResponse.
* @throws SdlException
*/
public void setdisplaylayout(String displayLayout, Integer correlationID) throws SdlException
{
SetDisplayLayout msg = RPCRequestFactory.BuildSetDisplayLayout(displayLayout, correlationID);
sendRPCRequest(msg);
}
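// Illustrative sketch ("MEDIA" is an assumed predefined layout name; the head unit decides
// which layouts it actually supports):
//   proxy.setdisplaylayout("MEDIA", corrId);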
/**
* Gets the type of transport currently used by this SdlProxy.
*
* @return One of TransportType enumeration values.
* @throws IllegalStateException if called while the connection is not initialized.
*
* @see TransportType
*/
public TransportType getCurrentTransportType() throws IllegalStateException {
if (sdlSession == null) {
throw new IllegalStateException("Incorrect state of SdlProxyBase: Calling for getCurrentTransportType() while connection is not initialized");
}
return sdlSession.getCurrentTransportType();
}
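// Illustrative sketch: the call is only valid once the session exists, so callers typically
// guard it, e.g.
//   try { TransportType t = proxy.getCurrentTransportType(); }
//   catch (IllegalStateException e) { /* not connected yet */ }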
public String getAppName()
{
return _applicationName;
}
public String getNgnAppName()
{
return _ngnMediaScreenAppName;
}
public String getAppID()
{
return _appID;
}
public long getInstanceDT()
{
return instanceDateTime;
}
public void setConnectionDetails(String sDetails)
{
sConnectionDetails = sDetails;
}
public String getConnectionDetails()
{
return sConnectionDetails;
}
//for testing only
public void setPoliciesURL(String sText)
{
sPoliciesURL = sText;
}
//for testing only
public String getPoliciesURL()
{
return sPoliciesURL;
}
} // end-class
package gw.internal.gosu.parser;
import gw.config.CommonServices;
import gw.config.ExecutionMode;
import gw.fs.IFile;
import gw.internal.gosu.dynamic.DynamicConstructorInfo;
import gw.internal.gosu.dynamic.DynamicMethodInfo;
import gw.internal.gosu.ir.transform.util.IRTypeResolver;
import gw.internal.gosu.ir.transform.util.NameResolver;
import gw.internal.gosu.parser.expressions.*;
import gw.internal.gosu.parser.statements.ArrayAssignmentStatement;
import gw.internal.gosu.parser.statements.AssertStatement;
import gw.internal.gosu.parser.statements.AssignmentStatement;
import gw.internal.gosu.parser.statements.BeanMethodCallStatement;
import gw.internal.gosu.parser.statements.BlockInvocationStatement;
import gw.internal.gosu.parser.statements.BreakStatement;
import gw.internal.gosu.parser.statements.CaseClause;
import gw.internal.gosu.parser.statements.CatchClause;
import gw.internal.gosu.parser.statements.ClasspathStatement;
import gw.internal.gosu.parser.statements.ContinueStatement;
import gw.internal.gosu.parser.statements.DelegateStatement;
import gw.internal.gosu.parser.statements.DoWhileStatement;
import gw.internal.gosu.parser.statements.EvalStatement;
import gw.internal.gosu.parser.statements.ForEachStatement;
import gw.internal.gosu.parser.statements.FunctionStatement;
import gw.internal.gosu.parser.statements.HideFieldNoOpStatement;
import gw.internal.gosu.parser.statements.IfStatement;
import gw.internal.gosu.parser.statements.LoopStatement;
import gw.internal.gosu.parser.statements.MapAssignmentStatement;
import gw.internal.gosu.parser.statements.MemberAssignmentStatement;
import gw.internal.gosu.parser.statements.MethodCallStatement;
import gw.internal.gosu.parser.statements.NamespaceStatement;
import gw.internal.gosu.parser.statements.NewStatement;
import gw.internal.gosu.parser.statements.NoOpStatement;
import gw.internal.gosu.parser.statements.NotAStatement;
import gw.internal.gosu.parser.statements.PropertyStatement;
import gw.internal.gosu.parser.statements.ReturnStatement;
import gw.internal.gosu.parser.statements.StatementList;
import gw.internal.gosu.parser.statements.SwitchStatement;
import gw.internal.gosu.parser.statements.SyntheticMemberAccessStatement;
import gw.internal.gosu.parser.statements.ThrowStatement;
import gw.internal.gosu.parser.statements.TryCatchFinallyStatement;
import gw.internal.gosu.parser.statements.TypeLoaderStatement;
import gw.internal.gosu.parser.statements.UsesStatement;
import gw.internal.gosu.parser.statements.UsesStatementList;
import gw.internal.gosu.parser.statements.UsingStatement;
import gw.internal.gosu.parser.statements.VarStatement;
import gw.internal.gosu.parser.statements.WhileStatement;
import gw.internal.gosu.parser.types.ConstructorType;
import gw.internal.gosu.template.TemplateGenerator;
import gw.internal.gosu.util.StringUtil;
import gw.lang.IReentrant;
import gw.lang.annotation.UsageTarget;
import gw.lang.function.IBlock;
import gw.lang.ir.IRElement;
import gw.lang.ir.IRType;
import gw.lang.parser.AnnotationUseSiteTarget;
import gw.lang.parser.ExternalSymbolMapForMap;
import gw.lang.parser.GosuParserFactory;
import gw.lang.parser.GosuParserTypes;
import gw.lang.parser.IBlockClass;
import gw.lang.parser.ICapturedSymbol;
import gw.lang.parser.ICoercer;
import gw.lang.parser.ICoercionManager;
import gw.lang.parser.IDynamicFunctionSymbol;
import gw.lang.parser.IDynamicPropertySymbol;
import gw.lang.parser.IDynamicSymbol;
import gw.lang.parser.IExpression;
import gw.lang.parser.IFileContext;
import gw.lang.parser.IFunctionSymbol;
import gw.lang.parser.IGosuParser;
import gw.lang.parser.IGosuValidator;
import gw.lang.parser.IHasArguments;
import gw.lang.parser.IHasInnerClass;
import gw.lang.parser.IInjectedSymbol;
import gw.lang.parser.ILanguageLevel;
import gw.lang.parser.IParseIssue;
import gw.lang.parser.IParseResult;
import gw.lang.parser.IParseTree;
import gw.lang.parser.IParsedElement;
import gw.lang.parser.IParsedElementWithAtLeastOneDeclaration;
import gw.lang.parser.IParserState;
import gw.lang.parser.IResolvingCoercer;
import gw.lang.parser.IScriptPartId;
import gw.lang.parser.ISource;
import gw.lang.parser.ISourceCodeTokenizer;
import gw.lang.parser.ISymbol;
import gw.lang.parser.ISymbolTable;
import gw.lang.parser.ITokenizerInstructor;
import gw.lang.parser.ITypeUsesMap;
import gw.lang.parser.Keyword;
import gw.lang.parser.MemberAccessKind;
import gw.lang.parser.ParserOptions;
import gw.lang.parser.PostCompilationAnalysis;
import gw.lang.parser.ScriptabilityModifiers;
import gw.lang.parser.SourceCodeReader;
import gw.lang.parser.StandardCoercionManager;
import gw.lang.parser.StandardScope;
import gw.lang.parser.SymbolType;
import gw.lang.parser.ThreadSafeSymbolTable;
import gw.lang.parser.TypeSystemAwareCache;
import gw.lang.parser.TypeVarToTypeMap;
import gw.lang.parser.coercers.IdentityCoercer;
import gw.lang.parser.exceptions.DoesNotOverrideFunctionException;
import gw.lang.parser.exceptions.ErrantGosuClassException;
import gw.lang.parser.exceptions.NoCtorFoundException;
import gw.lang.parser.exceptions.ParseException;
import gw.lang.parser.exceptions.ParseIssue;
import gw.lang.parser.exceptions.ParseResultsException;
import gw.lang.parser.exceptions.ParseWarning;
import gw.lang.parser.exceptions.ParseWarningForDeprecatedMember;
import gw.lang.parser.exceptions.WrongNumberOfArgsException;
import gw.lang.parser.expressions.IArithmeticExpression;
import gw.lang.parser.expressions.IBlockInvocation;
import gw.lang.parser.expressions.IConditionalExpression;
import gw.lang.parser.expressions.IImplicitTypeAsExpression;
import gw.lang.parser.expressions.IInferredNewExpression;
import gw.lang.parser.expressions.IInitializerExpression;
import gw.lang.parser.expressions.ILiteralExpression;
import gw.lang.parser.expressions.IParenthesizedExpression;
import gw.lang.parser.expressions.IProgram;
import gw.lang.parser.expressions.ISynthesizedMemberAccessExpression;
import gw.lang.parser.expressions.ITypeLiteralExpression;
import gw.lang.parser.expressions.ITypeParameterListClause;
import gw.lang.parser.expressions.ITypeVariableDefinition;
import gw.lang.parser.expressions.ITypeVariableDefinitionExpression;
import gw.lang.parser.expressions.IVarStatement;
import gw.lang.parser.expressions.Variance;
import gw.lang.parser.resources.Res;
import gw.lang.parser.resources.ResourceKey;
import gw.lang.parser.statements.IClasspathStatement;
import gw.lang.parser.statements.ITerminalStatement;
import gw.lang.parser.statements.ITypeLoaderStatement;
import gw.lang.parser.statements.IUsesStatement;
import gw.lang.parser.statements.TerminalType;
import gw.lang.parser.template.TemplateParseException;
import gw.lang.reflect.ConstructorInfoBuilder;
import gw.lang.reflect.FeatureManager;
import gw.lang.reflect.FunctionType;
import gw.lang.reflect.IAnnotationInfo;
import gw.lang.reflect.IAttributedFeatureInfo;
import gw.lang.reflect.IBlockType;
import gw.lang.reflect.ICanBeAnnotation;
import gw.lang.reflect.IConstructorInfo;
import gw.lang.reflect.IConstructorType;
import gw.lang.reflect.IDynamicType;
import gw.lang.reflect.IEnumValue;
import gw.lang.reflect.IErrorType;
import gw.lang.reflect.IFeatureInfo;
import gw.lang.reflect.IFunctionType;
import gw.lang.reflect.IInvocableType;
import gw.lang.reflect.IMetaType;
import gw.lang.reflect.IMethodInfo;
import gw.lang.reflect.INamespaceType;
import gw.lang.reflect.IOptionalParamCapable;
import gw.lang.reflect.IParameterInfo;
import gw.lang.reflect.IPlaceholder;
import gw.lang.reflect.IPropertyInfo;
import gw.lang.reflect.IPropertyInfoDelegate;
import gw.lang.reflect.IRelativeTypeInfo;
import gw.lang.reflect.IScriptabilityModifier;
import gw.lang.reflect.IType;
import gw.lang.reflect.ITypeInfo;
import gw.lang.reflect.ITypeVariableType;
import gw.lang.reflect.MethodList;
import gw.lang.reflect.MethodScore;
import gw.lang.reflect.MethodScorer;
import gw.lang.reflect.Modifier;
import gw.lang.reflect.TypeInfoUtil;
import gw.lang.reflect.TypeSystem;
import gw.lang.reflect.gs.ClassType;
import gw.lang.reflect.gs.GosuClassTypeLoader;
import gw.lang.reflect.gs.ICompilableType;
import gw.lang.reflect.gs.IGenericTypeVariable;
import gw.lang.reflect.gs.IGosuArrayClass;
import gw.lang.reflect.gs.IGosuClass;
import gw.lang.reflect.gs.IGosuClassTypeInfo;
import gw.lang.reflect.gs.IGosuEnhancement;
import gw.lang.reflect.gs.IGosuFragment;
import gw.lang.reflect.gs.IGosuProgram;
import gw.lang.reflect.gs.IGosuPropertyInfo;
import gw.lang.reflect.gs.IGosuVarPropertyInfo;
import gw.lang.reflect.gs.ISourceFileHandle;
import gw.lang.reflect.gs.StringSourceFileHandle;
import gw.lang.reflect.java.GosuTypes;
import gw.lang.reflect.java.IJavaPropertyInfo;
import gw.lang.reflect.java.IJavaType;
import gw.lang.reflect.java.JavaTypes;
import gw.lang.reflect.module.IModule;
import gw.util.DynamicArray;
import gw.util.GosuExceptionUtil;
import gw.util.GosuObjectUtil;
import gw.util.Pair;
import gw.util.Rational;
import gw.util.SpaceEfficientHashMap;
import gw.util.Stack;
import gw.util.concurrent.Cache;
import java.io.Closeable;
import java.io.IOException;
import java.io.StringReader;
import java.lang.annotation.ElementType;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
@SuppressWarnings({"ThrowableInstanceNeverThrown"})
public final class GosuParser extends ParserBase implements IGosuParser
{
public static final IType PENDING_BOUNDING_TYPE = ErrorType.getInstance( "Pending Bounding Type");
public static ErrorType notfound = ErrorType.getInstance( "_notfound_" );
private SourceCodeTokenizer _tokenizer;
private ISymbolTable _symTable;
private boolean _bParsed;
private Stack<ParsedElement> _stack;
private Stack<DynamicFunctionSymbol> _stackDFS;
private List<ParseTree> _locations;
private Program _parsingProgram;
private ArrayList<FunctionType> _parsingFunctions;
private boolean _parsingNewExpressionTypeLiteral;
private ArrayList<VarStatement> _parsingFieldInitializer;
private Map<String, List<IFunctionSymbol>> _dfsDeclByName;
private ITypeUsesMap _typeUsesMap;
private String _strNamespace;
private ITokenizerInstructor _tokenizerInstructor;
private IScriptabilityModifier _scriptabilityConstraint;
private int _iBreakOk;
private int _iContinueOk;
int _iReturnOk;
private Stack<IScriptPartId> _scriptPartIdStack;
private Stack<TypeVarToTypeMap> _inferenceMapStack;
private HashMap<String, ITypeVariableDefinition> _typeVarsByName;
private Stack<ContextType> _inferredContextStack = new Stack<>();
private boolean _bThrowForWarnings;
private boolean _bStudioEditorParser;
private boolean _bWarnOnCaseIssue;
private Stack<Boolean> _parsingAbstractConstructor;
private ContextInferenceManager _ctxInferenceMgr = new ContextInferenceManager();
private Stack<IType> _blockReturnTypeStack = new Stack<>();
private Stack<Boolean> _parsingStaticFeature;
private boolean _bCaptureSymbolsForEval;
private boolean _parsingAnnotation;
private boolean _allowWildcards;
private int _ignoreTypeDeprecation;
private boolean _bGenRootExprAccess;
private boolean _bProgramCallFunction;
private Map<String, IType> _typeCache;
private int _iStmtDepth;
private List<ParseTree> _savedLocations;
private Boolean _bAreUsingStatementsAllowedInStatementLists;
GosuParser( ISymbolTable symTable, IScriptabilityModifier scriptabilityConstraint )
{
this(symTable, scriptabilityConstraint, CommonServices.getEntityAccess().getDefaultTypeUses());
}
GosuParser( ISymbolTable symTable, IScriptabilityModifier scriptabilityConstraint, ITypeUsesMap tuMap )
{
super();
setOwner( this );
setWarnOnCaseIssue( Settings.WARN_ON_CASE_DEFAULT.get() == Boolean.TRUE);
setEditorParser( Settings.IDE_EDITOR_PARSER_DEFAULT.get() == Boolean.TRUE);
_symTable = symTable;
//noinspection unchecked
_typeUsesMap = tuMap.copy();
_scriptabilityConstraint = scriptabilityConstraint;
_dfsDeclByName = new HashMap<>();
_stack = new Stack<>();
_stackDFS = new Stack<>();
_locations = new ArrayList<>();
_parsingFunctions = new ArrayList<>();
_parsingFieldInitializer = new ArrayList<>();
_typeVarsByName = new HashMap<>( 2 );
_parsingStaticFeature = new Stack<>();
_parsingAbstractConstructor = new Stack<>();
_typeCache = new HashMap<>();
_bParsed = false;
_iReturnOk = 1;
_allowWildcards = false;
}
@Override
public void setValidator( IGosuValidator validator )
{
super.setValidator( validator );
}
// -- IGosuParser implementation --
public IScriptPartId getScriptPart()
{
if( _scriptPartIdStack == null || _scriptPartIdStack.isEmpty() )
{
return null;
}
return _scriptPartIdStack.peek();
}
public Stack<IScriptPartId> getScriptPartIdStack()
{
return _scriptPartIdStack;
}
public void pushScriptPart( IScriptPartId partId )
{
if( _scriptPartIdStack == null )
{
_scriptPartIdStack = new Stack<>();
}
_scriptPartIdStack.push( partId );
}
void popScriptPart( IScriptPartId partId )
{
IScriptPartId top = _scriptPartIdStack.pop();
if( top != partId )
{
throw new IllegalStateException( "Unbalanced push/pop script id" );
}
}
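// Usage note (illustrative, mirroring parseStatements(...) further below): pushScriptPart and
// popScriptPart must stay balanced, typically via try/finally:
//   pushScriptPart( partId );
//   try { /* parse */ } finally { popScriptPart( partId ); }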
public TypeVarToTypeMap getInferenceMap()
{
if( _inferenceMapStack == null || _inferenceMapStack.isEmpty() )
{
return null;
}
return _inferenceMapStack.peek();
}
public void pushInferenceMap( TypeVarToTypeMap inferenceMap )
{
if( _inferenceMapStack == null )
{
_inferenceMapStack = new Stack<>();
}
_inferenceMapStack.push( inferenceMap );
}
void popInferenceMap( TypeVarToTypeMap inferenceMap )
{
TypeVarToTypeMap top = _inferenceMapStack.pop();
if( top != inferenceMap )
{
throw new IllegalStateException( "Unbalanced push/pop inferenceMap" );
}
}
public void setScript( CharSequence source )
{
if( source == null )
{
source = "";
}
setScript( new SourceCodeReader( source ) );
}
public void setScript( ISource src )
{
if( src == null )
{
setScript( (CharSequence)null );
return;
}
ISourceCodeTokenizer tokenizer = src.getTokenizer();
if( tokenizer == null )
{
setScript( src.getSource() );
src.setTokenizer( _tokenizer );
_tokenizer.setInstructor( _tokenizerInstructor );
if( _tokenizerInstructor != null )
{
_tokenizerInstructor.setTokenizer( _tokenizer );
}
else if( getGosuClass() instanceof IGosuTemplateInternal &&
((IGosuTemplateInternal)getGosuClass()).getTokenizerInstructor() != null )
{
_tokenizerInstructor = ((IGosuTemplateInternal)getGosuClass()).getTokenizerInstructor();
_tokenizerInstructor.setTokenizer( _tokenizer );
if( _tokenizer.getInstructor() == null )
{
_tokenizer.setInstructor( _tokenizerInstructor );
}
}
}
else
{
_tokenizer = (SourceCodeTokenizer)tokenizer;
_tokenizer.setInstructor( _tokenizerInstructor );
if( _tokenizerInstructor != null )
{
_tokenizerInstructor.setTokenizer( _tokenizer );
}
_tokenizer.reset();
}
reset();
}
public void setScript( SourceCodeReader reader )
{
if( _tokenizer == null )
{
_tokenizer = new SourceCodeTokenizer( reader, _tokenizerInstructor );
// Initialize the tokenizer: treat '_' as a word character so identifiers containing underscores tokenize as single words
_tokenizer.wordChars( '_', '_' );
}
else
{
_tokenizer.reset( reader );
}
reset();
}
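// Usage note (illustrative): a caller normally hands the parser its source via one of the
// setScript(...) overloads above (CharSequence, ISource or SourceCodeReader) and then invokes
// a parse entry point such as parseStatements( partId ); each setScript(...) call clears the
// per-parse state via reset().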
public void resetScript()
{
_tokenizer.reset();
reset();
}
private void reset()
{
_stack.clear();
_stackDFS.clear();
_dfsDeclByName.clear();
_typeUsesMap.clearNonDefaultTypeUses();
_strNamespace = null;
_locations.clear();
_parsingFunctions.clear();
_parsingFieldInitializer.clear();
_typeVarsByName.clear();
_typeCache.clear();
setParsed( false );
}
@Override
protected String getScript()
{
return _tokenizer.getSource();
}
@Override
public ISymbolTable getSymbolTable()
{
return _symTable;
}
public void setSymbolTable( ISymbolTable symTable )
{
_symTable = symTable;
}
public ITypeUsesMap getTypeUsesMap()
{
return _typeUsesMap;
}
public void setTypeUsesMap( ITypeUsesMap typeUsesMap )
{
_typeUsesMap = typeUsesMap == null ? null : typeUsesMap.copyLocalScope();
}
public String getNamespace()
{
return _strNamespace;
}
void setNamespace( String strNamespace )
{
_strNamespace = strNamespace;
if( _strNamespace != null )
{
getTypeUsesMap().addToTypeUses( strNamespace + ".*" );
}
}
public IScriptabilityModifier getVisibilityConstraint()
{
return _scriptabilityConstraint;
}
public ITokenizerInstructor getTokenizerInstructor()
{
return _tokenizerInstructor;
}
public void setTokenizerInstructor( ITokenizerInstructor tokenizerInstructor )
{
_tokenizerInstructor = tokenizerInstructor;
if( _tokenizer != null )
{
_tokenizer.setInstructor( _tokenizerInstructor );
}
}
public FunctionType peekParsingFunction()
{
return _parsingFunctions.get( 0 );
}
public Iterator<FunctionType> iterateParsingFunctions()
{
return _parsingFunctions.iterator();
}
FunctionType popParsingFunction()
{
return _parsingFunctions.remove( 0 );
}
void pushParsingFunction( FunctionType functionType )
{
_parsingFunctions.add( 0, functionType );
}
public boolean isParsingFunction()
{
return _parsingFunctions.size() > 0;
}
private boolean isParsingTypeListeralForNewExpression()
{
return _parsingNewExpressionTypeLiteral;
}
private void setParsingTypeLiteralForNewExpression( boolean value )
{
_parsingNewExpressionTypeLiteral = value;
}
public VarStatement peekParsingFieldInitializer()
{
return _parsingFieldInitializer.get( 0 );
}
VarStatement popParsingFieldInitializer()
{
return _parsingFieldInitializer.remove( 0 );
}
void pushParsingFieldInitializer( VarStatement varStmt )
{
_parsingFieldInitializer.add( 0, varStmt );
}
public boolean isParsingFieldInitializer()
{
return !isParsingFunction() &&
!isParsingBlock() &&
_parsingFieldInitializer.size() > 0;
}
Program peekParsingProgram()
{
return _parsingProgram;
}
boolean isParsingProgram()
{
return _parsingProgram != null;
}
public Statement parseStatements( IScriptPartId partId ) throws ParseResultsException
{
Statement stmt = parseStatements( partId, true, true );
assignRuntime( stmt, true, null, null, partId);
return stmt;
}
private Statement parseStatements( IScriptPartId partId, boolean verify, boolean isolatedScope ) throws ParseResultsException
{
pushScriptPart( partId );
try
{
int iOffset = _tokenizer.getTokenStart();
int iLineNum = _tokenizer.getLineNumber();
int iColumn = getTokenizer().getTokenColumn();
_parseStatements( isolatedScope );
Statement stmtList = peekStatement();
verify( stmtList, match( null, SourceCodeTokenizer.TT_EOF ), Res.MSG_END_OF_STMT );
_bParsed = true;
if( stmtList instanceof StatementList )
{
setLocation( iOffset, iLineNum, iColumn, true );
}
if( verify )
{
verifyParsedElement( stmtList );
}
return stmtList;
}
finally
{
popScriptPart( partId );
}
}
void _parseStatements( boolean isolatedScope )
{
StatementList stmtList = new StatementList( _symTable );
_tokenizer.nextToken();
if( _tokenizer.getInstructor() == null )
{
parseProgramClasspathStatements();
parseProgramTypeLoaderStatements();
}
if ( isolatedScope )
{
_symTable.pushScope();
}
try
{
UsesStatementList usesStmtList = parseUsesStatementList( true );
List<Statement> statements = new ArrayList<>();
parseStatementsAndDetectUnreachable( statements );
if( usesStmtList != null )
{
statements.add( 0, usesStmtList );
}
stmtList.setStatements( statements );
pushStatement( isDontOptimizeStatementLists() ? stmtList : stmtList.getSelfOrSingleStatement() );
}
finally
{
if ( isolatedScope )
{
_symTable.popScope();
}
}
}
void parseStatementsAndDetectUnreachable( List<Statement> statements )
{
for( Statement stmt = null; parseStatement(); )
{
stmt = popStatementAndDetectUnreachable( stmt, statements );
}
}
private Statement popStatementAndDetectUnreachable( Statement previousStatement, List<Statement> statements )
{
Statement currentStatement = popStatement();
if( !(previousStatement instanceof ReturnStatement) || !(currentStatement instanceof NotAStatement) )
{
boolean[] bAbsolute = {false};
verifyOrWarn( currentStatement,
previousStatement == null ||
currentStatement.isNoOp() ||
previousStatement.getLeastSignificantTerminalStatement( bAbsolute ) == null || !bAbsolute[0],
!CommonServices.getEntityAccess().isUnreachableCodeDetectionOn(), Res.MSG_UNREACHABLE_STMT );
}
if( isParsingFunction() && !isParsingBlock() )
{
IType returnType = peekParsingFunction().getReturnType();
if( previousStatement instanceof ReturnStatement && returnType == JavaTypes.pVOID() )
{
if( currentStatement instanceof NotAStatement ||
((currentStatement instanceof BeanMethodCallStatement ||
currentStatement instanceof MethodCallStatement ||
currentStatement instanceof MemberAssignmentStatement) &&
currentStatement.getLineNum() == previousStatement.getLineNum()) ||
(currentStatement instanceof NoOpStatement && isUnexpectedToken( currentStatement )) )
{
if( isUnexpectedToken( currentStatement ) )
{
currentStatement.clearParseExceptions();
}
verify( currentStatement, false, Res.MSG_RETURN_VAL_FROM_VOID_FUNCTION );
}
}
}
statements.add( currentStatement );
return currentStatement.isNoOp() ? previousStatement : currentStatement;
}
private boolean isUnexpectedToken( Statement statement )
{
for( IParseIssue issue : statement.getParseExceptions() )
{
if( issue.getMessageKey() == Res.MSG_UNEXPECTED_TOKEN )
{
return true;
}
}
return false;
}
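// Illustrative Gosu snippets (hypothetical) for the two checks implemented above:
//
//   function f() : int {
//     return 1
//     print( "never" )      // reported via Res.MSG_UNREACHABLE_STMT (a warning unless
//   }                       // unreachable-code detection is configured otherwise)
//
//   function g() {          // void return type
//     return foo()          // "return" parses as an empty return; the trailing call on the
//   }                       // same line is reported via Res.MSG_RETURN_VAL_FROM_VOID_FUNCTION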
public IProgram parseProgram( IScriptPartId partId ) throws ParseResultsException
{
return parseProgram( partId, true, null );
}
public IProgram parseProgram( IScriptPartId partId, IType expectedExpressionType ) throws ParseResultsException
{
return parseProgram( partId, true, expectedExpressionType );
}
public IProgram parseProgram( IScriptPartId partId, IType expectedExpressionType, IFileContext ctx, boolean assignRuntime ) throws ParseResultsException
{
return parseProgram( partId, true, false, expectedExpressionType, ctx, assignRuntime );
}
public IProgram parseProgram( IScriptPartId partId, IType expectedExpressionType, IFileContext ctx, boolean assignRuntime, boolean bDoNotThrowParseResultsException ) throws ParseResultsException
{
return parseProgram( partId, true, false, expectedExpressionType, ctx, assignRuntime, bDoNotThrowParseResultsException );
}
public IProgram parseProgram( IScriptPartId partId, boolean isolatedScope, IType expectedExpressionType ) throws ParseResultsException
{
return parseProgram( partId, true, false, expectedExpressionType, null, true );
}
public IProgram parseProgram( IScriptPartId partId, boolean isolatedScope, boolean reallyIsolatedScope, IType expectedExpressionType, IFileContext ctx, boolean assignRuntime ) throws ParseResultsException
{
return parseProgram( partId, isolatedScope, reallyIsolatedScope, expectedExpressionType, ctx, assignRuntime, false );
}
public IProgram parseProgram( IScriptPartId partId, boolean isolatedScope, boolean reallyIsolatedScope, IType expectedExpressionType, IFileContext ctx, boolean assignRuntime, boolean bDoNotThrowParseResultsException ) throws ParseResultsException
{
return parseProgram( partId, isolatedScope, reallyIsolatedScope, expectedExpressionType, ctx, assignRuntime, bDoNotThrowParseResultsException, null );
}
public IProgram parseProgram( IScriptPartId partId, boolean isolatedScope, boolean reallyIsolatedScope, IType expectedExpressionType, IFileContext ctx, boolean assignRuntime, boolean bDoNotThrowParseResultsException, IType superType ) throws ParseResultsException
{
Program program = new Program();
program.setDeclaredReturnType(expectedExpressionType);
_parsingProgram = program;
pushScriptPart( partId );
try
{
try
{
_tokenizer.nextToken();
if( _tokenizer.getInstructor() == null )
{
parseProgramClasspathStatements();
}
if (superType != null)
{
IGosuClassInternal superTypeGosuClass = GosuClass.Util.getGosuClassFrom(superType);
superTypeGosuClass.putClassMembers(this, getSymbolTable(), null, false );
}
GosuClassParser.putTypeUsesMapFeatures( this, getSymbolTable(), null );
if ( isolatedScope )
{
if ( reallyIsolatedScope ) {
_symTable.pushIsolatedScope( new GosuParserTransparentActivationContext( partId ) );
} else {
_symTable.pushScope();
}
}
try
{
// First just find and parse the function Declarations
for( ISymbol function = parseFunctionOrPropertyDeclaration( program );
function != null; function = parseFunctionOrPropertyDeclaration( program ) )
{
_symTable.putSymbol( function );
}
// Next we have to reset the tokenizer to the beginning.
_tokenizer.reset();
_locations.clear();
// Now parse the program as normal....
// Note function definitions are parsed as no-op statements, but are
// pushed onto the dynamic function symbol stack.
Statement mainStatement = parseStatements( getScriptPart(), false, false );
// Map the parsed function definitions by name
Map<String, DynamicFunctionSymbol> functionMap = new SpaceEfficientHashMap<>();
while( peekDynamicFunctionSymbol() != null )
{
DynamicFunctionSymbol function = popDynamicFunctionSymbol();
functionMap.put( function.getName(), function );
}
mainStatement.addParseIssues( program.getParseIssues() );
program.setMainStatement( mainStatement );
program.setFunctions( functionMap );
program.setLocation( mainStatement.getLocation() );
mainStatement.setParent( program );
verify( program, match( null, SourceCodeTokenizer.TT_EOF ), Res.MSG_END_OF_STMT );
_bParsed = true;
PostCompilationAnalysis.maybeAnalyze( program );
if( !bDoNotThrowParseResultsException )
{
verifyParsedElement( program );
}
CompileTimeAnnotationHandler.postDefinitionVerification( mainStatement );
}
finally
{
if( isolatedScope )
{
_symTable.popScope();
}
}
}
finally
{
_parsingProgram = null;
}
if( assignRuntime )
{
assignRuntime( program, isolatedScope, ctx, superType, partId);
}
}
finally
{
popScriptPart( partId );
}
return program;
}
@Override
public void setGenerateRootExpressionAccessForProgram( boolean bGenRootExprAccess )
{
_bGenRootExprAccess = bGenRootExprAccess;
}
public boolean isGenerateRootExpressionAccessForProgram()
{
return _bGenRootExprAccess;
}
private void assignRuntime(ParsedElement elem, boolean bIsolatedScope, IFileContext context, IType superType, IScriptPartId partId) throws ParseResultsException
{
// if( isForStringLiteralTemplate() )
// return;
if( elem.isCompileTimeConstant() )
{
return;
}
if( bIsolatedScope )
{
_symTable.pushScope();
}
try
{
GosuProgramParser programParser = new GosuProgramParser();
ParserOptions options = new ParserOptions().withTypeUsesMap( getTypeUsesMap() )
.withExpectedType( elem.getReturnType() )
.withTokenizerInstructor( getTokenizerInstructor() )
.withSuperType( superType )
.withFileContext( context )
.withCtxInferenceMgr( _ctxInferenceMgr.copy() )
.withGenRootExprAccess( isGenerateRootExpressionAccessForProgram() )
.asThrowawayProgram()
.withScriptPartId(partId);
IParseResult result = programParser.parseExpressionOrProgram( getScript(), getSymbolTable(), options );
IGosuProgramInternal p = (IGosuProgramInternal)result.getProgram();
if( p == null )
{
throw new IllegalStateException();
}
elem.setGosuProgram( p );
if( partId != null )
{
partId.setRuntimeType( p );
}
}
catch( Exception t )
{
if( t instanceof ParseResultsException )
{
if( isForStringLiteralTemplate() )
{
//## todo: shouldn't be creating a GosuProgram for this case, but the apps don't start for some reason.
// Uncomment code at top of method
return;
}
throw (ParseResultsException)t;
}
throw (RuntimeException)t;
}
finally
{
if( bIsolatedScope )
{
_symTable.popScope();
}
}
}
private boolean isForStringLiteralTemplate()
{
return getScriptPart() != null &&
getScriptPart().getId() != null &&
(getScriptPart().getId().equals( TemplateGenerator.GS_TEMPLATE ) ||
getScriptPart().getId().equals( TemplateGenerator.GS_TEMPLATE_PARSED )) &&
getScriptPart().getContainingType() instanceof IGosuClass;
}
List<IClasspathStatement> parseProgramClasspathStatements()
{
int iOffset = getTokenizer().getTokenStart();
int iLineNum = getTokenizer().getLineNumber();
int iColumn = getTokenizer().getLineOffset();
List<IClasspathStatement> returnList = new ArrayList<>();
while( match( null, Keyword.KW_classpath ) )
{
ClasspathStatement cpStatement = new ClasspathStatement();
if( parseStringLiteralSeparately() )
{
StringLiteral expression = (StringLiteral)popExpression();
String strVal = (String)expression.evaluate();
cpStatement.setClasspath( strVal );
if( strVal.contains( ";" ) )
{
verifyOrWarn( cpStatement, false, true, Res.MSG_COMMA_IS_THE_CLASSPATH_SEPARATOR );
}
}
else
{
verify( cpStatement, false, Res.MSG_CLASSPATH_STATEMENT_EXPECTS_A_STRING );
}
returnList.add( cpStatement );
pushStatement(cpStatement);
try
{
setLocation( iOffset, iLineNum, iColumn );
if( getGosuClass() != null &&
((IGosuProgramInternal)getGosuClass()).isParsingExecutableProgramStatements() )
{
// Remove unwanted cp stmt resulting from parsing program on second pass for evaluate() method
getLocationsList().remove( cpStatement.getLocation() );
}
}
finally
{
popStatement();
}
iOffset = getTokenizer().getTokenStart();
iLineNum = getTokenizer().getLineNumber();
iColumn = getTokenizer().getLineOffset();
}
return returnList;
}
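// Example program header consumed by the loop above (illustrative; paths are hypothetical):
//
//   classpath "../lib, ../ext"     // comma-separated entries
//   classpath "../lib;../ext"      // ';' draws the Res.MSG_COMMA_IS_THE_CLASSPATH_SEPARATOR warning
//   classpath foo                  // non-string operand: Res.MSG_CLASSPATH_STATEMENT_EXPECTS_A_STRING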
List<ITypeLoaderStatement> parseProgramTypeLoaderStatements()
{
int iOffset = getTokenizer().getTokenStart();
int iLineNum = getTokenizer().getLineNumber();
int iColumn = getTokenizer().getLineOffset();
List<ITypeLoaderStatement> returnList = new ArrayList<>();
while( match( null, Keyword.KW_typeloader ) )
{
TypeLoaderStatement stmt = new TypeLoaderStatement();
parseTypeLiteral();
TypeLiteral expression = (TypeLiteral)popExpression();
stmt.setTypeLoader( expression.getType().getType() );
returnList.add( stmt );
pushStatement( stmt );
try
{
setLocation( iOffset, iLineNum, iColumn );
}
finally
{
popStatement();
}
iOffset = getTokenizer().getTokenStart();
iLineNum = getTokenizer().getLineNumber();
iColumn = getTokenizer().getLineOffset();
}
return returnList;
}
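// Example (illustrative; the loader class name is hypothetical): each statement parses a
// type literal naming a type loader and records it on a TypeLoaderStatement.
//
//   typeloader my.loaders.CustomTypeLoader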
public Expression parseExp( IScriptPartId partId ) throws ParseResultsException
{
return parseExp( partId, true, null, true );
}
public Expression parseExp( IScriptPartId partId, IType expectedExpressionType ) throws ParseResultsException
{
return parseExp( partId, true, expectedExpressionType, true );
}
public Expression parseExp( IScriptPartId partId, IType expectedExpressionType, IFileContext context, boolean assignRuntime ) throws ParseResultsException
{
return parseExp( partId, true, expectedExpressionType, context, assignRuntime );
}
private Expression parseExp( IScriptPartId partId, boolean isolatedScope, IType expectedExpressionType, boolean assignRuntime ) throws ParseResultsException {
return parseExp(partId, isolatedScope, expectedExpressionType, null, assignRuntime);
}
private Expression parseExp( IScriptPartId partId, boolean isolatedScope,
IType expectedExpressionType, IFileContext context, boolean assignRuntime ) throws ParseResultsException
{
pushScriptPart( partId );
_tokenizer.nextToken();
Expression expression;
if ( isolatedScope )
{
_symTable.pushScope();
}
try
{
parseExpression( new ContextType( expectedExpressionType, false ) );
expression = popExpression();
verify( expression, match( null, SourceCodeTokenizer.TT_EOF ), Res.MSG_END_OF_EXPRESSION );
_bParsed = true;
verifyParsedElement( expression );
}
finally
{
if ( isolatedScope )
{
_symTable.popScope();
}
popScriptPart( partId );
}
if (assignRuntime) {
assignRuntime( expression, isolatedScope, context, null, partId);
}
CompileTimeAnnotationHandler.postDefinitionVerification( expression );
return expression;
}
public IExpression parseExpOrProgram( IScriptPartId partId ) throws ParseResultsException
{
return parseExpOrProgram( partId, true, true );
}
public IExpression parseExpOrProgram( IScriptPartId partId, boolean isolatedScope, boolean assignRuntime ) throws ParseResultsException
{
return parseExpOrProgram( partId, null, isolatedScope, assignRuntime );
}
public IExpression parseExpOrProgram( IScriptPartId partId, IType typeExpected, boolean isolatedScope, boolean assignRuntime ) throws ParseResultsException
{
IExpression exp;
try
{
exp = parseExp( partId, isolatedScope, typeExpected, assignRuntime );
}
catch( ParseResultsException expressionParseResultException )
{
boolean isProbablyProgram = !getTokenizer().isEOF();
//noinspection CaughtExceptionImmediatelyRethrown
try
{
Map<String, List<IFunctionSymbol>> dfsDeclByName = new HashMap<>(_dfsDeclByName);
resetScript();
_dfsDeclByName = dfsDeclByName;
exp = parseProgram( partId, isolatedScope, typeExpected );
}
catch( ParseResultsException programParseResultsException )
{
if( isProbablyProgram )
{
throw programParseResultsException;
}
else
{
// Note we can't just rethrow the original exception because we need
// the locations etc. in the parser, so we have to reparse and let it throw.
Map<String, List<IFunctionSymbol>> dfsDeclByName = new HashMap<>(_dfsDeclByName);
resetScript();
_dfsDeclByName = dfsDeclByName;
exp = parseExp( partId, isolatedScope, null, assignRuntime );
}
}
}
return exp;
}
public TypeLiteral parseTypeLiteral( IScriptPartId partId ) throws ParseResultsException
{
pushScriptPart( partId );
try
{
_tokenizer.nextToken();
_symTable.pushScope();
try
{
parseTypeLiteral();
}
finally
{
_symTable.popScope();
}
Expression expression = popExpression();
verify( expression, match( null, SourceCodeTokenizer.TT_EOF ), Res.MSG_END_OF_EXPRESSION );
_bParsed = true;
return (TypeLiteral)expression;
}
finally
{
popScriptPart( partId );
}
}
public boolean isParsed()
{
return _bParsed;
}
protected void setParsed( boolean bParsed )
{
_bParsed = bParsed;
}
// GosuParser methods
@Override
final public SourceCodeTokenizer getTokenizer()
{
return _tokenizer;
}
@Override
List<ParseTree> getLocationsList()
{
return _locations;
}
public List<IParseTree> getLocations()
{
return new ArrayList<>( _locations );
}
public ParseTree peekLocation()
{
if( _locations.isEmpty() )
{
return null;
}
return _locations.get( _locations.size()-1 );
}
public boolean hasWarnings()
{
return false;
}
public boolean isThrowParseResultsExceptionForWarnings()
{
return _bThrowForWarnings;
}
public void setThrowParseExceptionForWarnings( boolean bThrowParseExceptionForWarnings )
{
_bThrowForWarnings = bThrowParseExceptionForWarnings;
}
// expression
// <conditional-expression>
void parseExpression()
{
parseExpressionNoVerify( ContextType.EMPTY );
}
void parseExpression( ContextType contextType )
{
parseExpression( contextType, true );
}
void parseExpression( ContextType ctxType, boolean bVerify )
{
parseExpressionNoVerify( ctxType );
if( bVerify && ctxType.getType() != null && !ctxType.isMethodScoring() )
{
Expression expr = popExpression();
verifyComparable( ctxType.getType(), expr );
expr = possiblyWrapWithImplicitCoercion( expr, ctxType.getType() );
pushExpression( expr );
}
}
void parseExpressionNoVerify( ContextType ctxType )
{
pushInferredContextTypes( ctxType );
try
{
int iOffset = _tokenizer.getTokenStart();
int iLineNum = _tokenizer.getLineNumber();
int iColumn = _tokenizer.getTokenColumn();
parseConditionalExpression();
if( !ctxType.isMethodScoring() )
{
convertNumberLiteralsToContextType( ctxType.getType() );
convertNullLiterals( ctxType.getType() );
}
setLocation( iOffset, iLineNum, iColumn );
}
finally
{
popInferredContextTypes();
}
}
public void setContextInferenceManager(ContextInferenceManager ctxInferenceMgr) {
_ctxInferenceMgr = ctxInferenceMgr;
}
public void pushInferredContextTypes( ContextType ctxType )
{
if( ctxType == null )
{
ctxType = ContextType.EMPTY;
}
if( ctxType.getType() instanceof IBlockType )
{
// Force block param types to initialize if necessary
((IBlockType)ctxType.getType()).getParameterTypes();
}
_inferredContextStack.push( ctxType );
}
public void popInferredContextTypes()
{
_inferredContextStack.pop();
}
public ContextType getContextType()
{
if( _inferredContextStack.isEmpty() )
{
return ContextType.EMPTY;
}
else
{
ContextType ctxType = _inferredContextStack.peek();
assert ctxType != null;
return ctxType;
}
}
private void convertNullLiterals( IType contextType )
{
if( !_stack.isEmpty() && contextType != null )
{
Expression expression = peekExpression();
if( expression instanceof NullExpression && !contextType.isPrimitive() )
{
expression.setType( contextType );
}
}
}
@SuppressWarnings({"ThrowableResultOfMethodCallIgnored"})
private void convertNumberLiteralsToContextType( IType contextType )
{
if( _stack.isEmpty() )
{
return;
}
Expression expr = peekExpression();
UnaryExpression unary = null;
if( expr instanceof UnaryExpression )
{
unary = (UnaryExpression)expr;
Expression subexpr = unary.getExpression();
if( subexpr instanceof NumericLiteral )
{
expr = subexpr;
}
}
if( expr instanceof NumericLiteral )
{
// If a numeric type is in context, consider this number literal to be of that type
NumericLiteral literal = (NumericLiteral)expr;
IType ctxNumberType = getNumberTypeFromContextType( contextType );
if( literal.getType() == ctxNumberType )
{
return;
}
if( ctxNumberType != null && !literal.isExplicitlyTyped() )
{
// A numeric type is in context and the literal is not explicitly typed, so re-type the literal
String strValue = literal.getStrValue();
try
{
if( isPrefixNumericLiteral( strValue ) )
{
strValue = parseIntOrLongOrBigInt( strValue ) + "";
}
if( JavaTypes.pBYTE().equals( ctxNumberType ) || JavaTypes.BYTE().equals( ctxNumberType ) )
{
literal.setValue( Byte.parseByte( strValue ) );
}
else if( JavaTypes.pSHORT().equals( ctxNumberType ) || JavaTypes.SHORT().equals( ctxNumberType ) )
{
literal.setValue( Short.parseShort( strValue ) );
}
else if( JavaTypes.pINT().equals( ctxNumberType ) || JavaTypes.INTEGER().equals( ctxNumberType ) )
{
literal.setValue( Integer.parseInt( strValue ) );
}
else if( JavaTypes.pLONG().equals( ctxNumberType ) || JavaTypes.LONG().equals( ctxNumberType ) )
{
literal.setValue( Long.parseLong( strValue ) );
}
else if( JavaTypes.pFLOAT().equals( ctxNumberType ) || JavaTypes.FLOAT().equals( ctxNumberType ) )
{
literal.setValue( Float.parseFloat( strValue ) );
}
else if( JavaTypes.pDOUBLE().equals( ctxNumberType ) || JavaTypes.DOUBLE().equals( ctxNumberType ) )
{
literal.setValue( Double.parseDouble( strValue ) );
}
else if( JavaTypes.BIG_INTEGER().equals( ctxNumberType ) )
{
literal.setValue( new BigInteger( strValue ) );
}
else if( JavaTypes.BIG_DECIMAL().equals( ctxNumberType ) )
{
literal.setValue( new BigDecimal( strValue ) );
}
else if( JavaTypes.RATIONAL().equals( ctxNumberType ) )
{
literal.setValue( Rational.get( strValue ) );
}
else if( literal.getType().isPrimitive() &&
(JavaTypes.OBJECT().equals( ctxNumberType ) ||
(JavaTypes.NUMBER().equals( ctxNumberType ) && ctxNumberType.isAssignableFrom( TypeSystem.getBoxType( literal.getType() )))) )
{
ctxNumberType = TypeSystem.getBoxType( literal.getType() );
}
else
{
return;
}
}
catch( NumberFormatException ex )
{
return;
//addError( literal, Res.MSG_IMPROPER_VALUE_FOR_NUMERIC_TYPE, strValue, ctxNumberType.getName() );
}
literal.setType( ctxNumberType );
if( unary != null )
{
unary.setType( ctxNumberType );
}
}
}
}
private IType getNumberTypeFromContextType( IType ctxType )
{
if( ctxType == null )
{
return null;
}
// Return ctxType if it's a primitive number, assignable to Number, or is Object, otherwise null.
IType compType = ctxType.isPrimitive() ? TypeSystem.getBoxType( ctxType ) : ctxType;
return JavaTypes.NUMBER().isAssignableFrom( compType ) || JavaTypes.OBJECT().equals( compType ) ? ctxType : null;
}
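// Illustrative effect of convertNumberLiteralsToContextType (hypothetical Gosu snippets):
//
//   var b : byte = 127          // untyped literal re-typed to byte via Byte.parseByte
//   var d : BigDecimal = 1.5    // untyped literal re-typed to BigDecimal
//   var l = 10L                 // explicitly typed (suffixed) literals are left unchanged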
// conditional-expression
// <conditional-or-expression>
// <conditional-expression> ? <conditional-expression> : <conditional-expression>
// Left recursion removed is:
// conditional-expression
// <conditional-or-expression> <conditional-expression2>
// conditional-expression2
// ? <conditional-expression> : <conditional-expression>
// <null>
void parseConditionalExpression()
{
int iOffset = _tokenizer.getTokenStart();
int iLineNum = _tokenizer.getLineNumber();
int iColumn = _tokenizer.getTokenColumn();
_parseConditionalExpression();
setLocation( iOffset, iLineNum, iColumn );
}
void _parseConditionalExpression()
{
// <conditional-or-expression>
_ctxInferenceMgr.pushCtx();
parseConditionalOrExpression();
boolean foundCondExpr = false;
// <conditional-or-expression> ? <conditional-expression> : <conditional-expression>
if( match( null, "?", SourceCodeTokenizer.TT_OPERATOR ) )
{
foundCondExpr = true;
ConditionalTernaryExpression cte = new ConditionalTernaryExpression();
Expression condition = popExpression();
if( !JavaTypes.pBOOLEAN().equals( condition.getType() ) &&
!JavaTypes.BOOLEAN().equals( condition.getType() ) )
{
condition.addParseException( Res.MSG_CONDITIONAL_EXPRESSION_EXPECTS_BOOLEAN );
}
condition = possiblyWrapWithImplicitCoercion( condition, JavaTypes.pBOOLEAN() );
verifyComparable( JavaTypes.pBOOLEAN(), condition );
parseConditionalExpression();
Expression first = popExpression();
verify( cte, match( null, ":", SourceCodeTokenizer.TT_OPERATOR ), Res.MSG_EXPECTING_COLON_TERNARY );
_ctxInferenceMgr.popCtx( false );
parseConditionalExpression();
Expression second = popExpression();
_ctxInferenceMgr.pushCtx();
IType type = findLeastUpperBoundWithCoercions(cte, first, second);
cte.setType( type );
cte.setCondition( condition );
cte.setFirst( possiblyWrapWithImplicitCoercion( first, type ) );
cte.setSecond( possiblyWrapWithImplicitCoercion( second, type ) );
pushExpression( cte );
}
else if( match( null, "?:", SourceCodeTokenizer.TT_OPERATOR ) )
{
foundCondExpr = true;
ConditionalTernaryExpression cte = new ConditionalTernaryExpression();
Expression first = popExpression();
verify( first, !first.getType().isPrimitive(), Res.MSG_EXPECTING_REFERENCE_TYPE );
_ctxInferenceMgr.popCtx( false );
parseConditionalExpression();
Expression second = popExpression();
_ctxInferenceMgr.pushCtx();
IType type = findLeastUpperBoundWithCoercions(cte, first, second);
cte.setType( type );
cte.setCondition( null );
cte.setFirst( possiblyWrapWithImplicitCoercion( first, type ) );
cte.setSecond( possiblyWrapWithImplicitCoercion( second, type ) );
pushExpression( cte );
}
_ctxInferenceMgr.popCtx( !foundCondExpr );
}
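// Illustrative forms handled by _parseConditionalExpression (hypothetical snippets):
//
//   var x = cond ? a : b        // plain ternary; the condition must coerce to boolean
//   var y = name ?: "default"   // "?:" form: no condition expression, LHS must be a reference type
//
// Either way the result type is the least upper bound of the two branches, with
// primitive/boxed coercions applied by findLeastUpperBoundWithCoercions below.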
private boolean isPrimitiveOrBoxedOrBigIntegerOrBigDecimal(IType type)
{
return type != null &&
!JavaTypes.pVOID().equals(type) &&
( type.isPrimitive() ||
TypeSystem.getPrimitiveType(type) != null ||
JavaTypes.BIG_INTEGER().equals(type) ||
JavaTypes.BIG_DECIMAL().equals(type)
);
}
private IType findLeastUpperBoundWithCoercions(ConditionalTernaryExpression cte, Expression first, Expression second) {
IType type;
IType firstType = first.getType();
IType secondType = second.getType();
if( isPrimitiveOrBoxedOrBigIntegerOrBigDecimal( firstType ) &&
isPrimitiveOrBoxedOrBigIntegerOrBigDecimal( secondType ) )
{
type = TypeLord.getLeastUpperBoundForPrimitiveTypes( firstType, secondType );
if( !verify( cte, type != null, Res.MSG_LOSS_OF_PRECISION_IN_CONDITIONAL_EXP ) )
{
type = ErrorType.getInstance();
}
}
else if( GosuParserTypes.NULL_TYPE().equals( firstType ) && GosuParserTypes.NULL_TYPE().equals( secondType ) )
{
IType ctxType = getContextType().getType();
return ctxType != null && !ctxType.isPrimitive() ? ctxType : GosuParserTypes.NULL_TYPE();
}
else if( GosuParserTypes.NULL_TYPE().equals( firstType ) && secondType.isPrimitive() )
{
return TypeLord.getBoxedTypeFromPrimitiveType( secondType );
}
else if( GosuParserTypes.NULL_TYPE().equals( secondType ) && firstType.isPrimitive() )
{
return TypeLord.getBoxedTypeFromPrimitiveType( firstType );
} // HACK
// Do not allow literal strings that are coercible to the type of the other side to modify the
// type of the expression
else if( canCoerceFromString(first, second) )
{
type = secondType;
} // HACK
// Do not allow literal strings that are coercible to the type of the other side to modify the
// type of the expression
else if( canCoerceFromString(second, first) )
{
type = firstType;
}
else
{
if( firstType.isPrimitive() && !GosuParserTypes.NULL_TYPE().equals( firstType ) && !secondType.isPrimitive() && !StandardCoercionManager.isBoxed( secondType ) )
{
firstType = TypeLord.getBoxedTypeFromPrimitiveType( firstType );
}
else if( secondType.isPrimitive() && !GosuParserTypes.NULL_TYPE().equals( secondType ) && !firstType.isPrimitive() && !StandardCoercionManager.isBoxed( firstType ) )
{
secondType = TypeLord.getBoxedTypeFromPrimitiveType( secondType );
}
List<IType> list = new ArrayList<>();
if( !GosuParserTypes.NULL_TYPE().equals( firstType ) )
{
list.add( firstType );
}
if( !GosuParserTypes.NULL_TYPE().equals( secondType ) )
{
list.add( secondType );
}
type = TypeLord.findLeastUpperBound( list );
}
return type;
}
private boolean canCoerceFromString(Expression first, Expression second)
{
return first instanceof ILiteralExpression &&
!(second instanceof ILiteralExpression) &&
JavaTypes.STRING().equals(first.getType()) &&
CommonServices.getCoercionManager().canCoerce( second.getType(), JavaTypes.STRING() );
}
private Expression wrapExpressionIfNeeded( Expression first, Expression second )
{
if( first.getType() == JavaTypes.pVOID() && second.getType().isPrimitive() )
{
return possiblyWrapWithImplicitCoercion( first, TypeSystem.getBoxType( second.getType() ) );
}
else if( second.getType() == JavaTypes.pVOID() && first.getType().isPrimitive() )
{
return possiblyWrapWithImplicitCoercion( first, TypeSystem.getBoxType( first.getType() ) );
}
return first;
}
// conditional-or-expression
// <conditional-and-expression>
// <conditional-or-expression> || <conditional-and-expression>
// Left recursion removed is:
// conditional-or-expression
// <conditional-and-expression> <conditional-or-expression2>
// conditional-or-expression2
// || <conditional-and-expression>
// <null>
void parseConditionalOrExpression()
{
int iOffset = _tokenizer.getTokenStart();
int iLineNum = _tokenizer.getLineNumber();
int iColumn = getTokenizer().getTokenColumn();
parseConditionalAndExpression();
// <conditional-or-expression2>
boolean foundOr = false;
do
{
if( match( null, "||", SourceCodeTokenizer.TT_OPERATOR ) || match( null, Keyword.KW_or ) )
{
_ctxInferenceMgr.clear();
foundOr = true;
parseConditionalAndExpression();
ConditionalOrExpression e = new ConditionalOrExpression();
Expression rhs = popExpression();
verifyComparable( JavaTypes.pBOOLEAN(), rhs, true, true );
rhs = possiblyWrapWithImplicitCoercion( rhs, JavaTypes.pBOOLEAN() );
Expression lhs = popExpression();
verifyComparable( JavaTypes.pBOOLEAN(), lhs, true, true );
lhs = possiblyWrapWithImplicitCoercion( lhs, JavaTypes.pBOOLEAN() );
e.setLHS( lhs );
e.setRHS( rhs );
pushExpression( e );
}
else
{
// The <null> case
break;
}
setLocation( iOffset, iLineNum, iColumn );
}
while( true );
if (foundOr) {
_ctxInferenceMgr.clear();
}
}
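// Both spellings of the operator are accepted here (illustrative):
//
//   if( a || b ) { ... }
//   if( a or b ) { ... }   // Keyword.KW_or behaves the same as "||"
//
// Each operand is verified as comparable to boolean and wrapped with an implicit coercion
// before the ConditionalOrExpression is pushed.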
// conditional-and-expression
// <equality-expression>
// <conditional-and-expression> && <equality-expression>
// Left recursion removed is:
// conditional-and-expression
// <equality-expression> <conditional-and-expression2>
// conditional-and-expression2
// && <equality-expression>
// <null>
void parseConditionalAndExpression()
{
int iOffset = _tokenizer.getTokenStart();
int iLineNum = _tokenizer.getLineNumber();
int iColumn = getTokenizer().getTokenColumn();
parseBitwiseOrExpression();
// <conditional-and-expression2>
do
{
if( match( null, "&&", SourceCodeTokenizer.TT_OPERATOR ) || match( null, Keyword.KW_and ) )
{
parseBitwiseOrExpression();
ConditionalAndExpression e = new ConditionalAndExpression();
Expression rhs = popExpression();
verifyComparable( JavaTypes.pBOOLEAN(), rhs, true, true );
rhs = possiblyWrapWithImplicitCoercion( rhs, JavaTypes.pBOOLEAN() );
Expression lhs = popExpression();
verifyComparable( JavaTypes.pBOOLEAN(), lhs, true, true );
lhs = possiblyWrapWithImplicitCoercion( lhs, JavaTypes.pBOOLEAN() );
e.setLHS( lhs );
e.setRHS( rhs );
pushExpression( e );
}
else
{
// The <null> case
break;
}
setLocation( iOffset, iLineNum, iColumn );
}
while( true );
}
// bitwise-or-expression
// <bitwise-xor-expression>
// <bitwise-or-expression> | <bitwise-xor-expression>
// Left recursion removed is:
// bitwise-or-expression
// <bitwise-xor-expression> <bitwise-or-expression2>
// bitwise-or-expression2
// | <bitwise-xor-expression>
// <null>
void parseBitwiseOrExpression()
{
int iOffset = _tokenizer.getTokenStart();
int iLineNum = _tokenizer.getLineNumber();
int iColumn = getTokenizer().getTokenColumn();
parseBitwiseXorExpression();
// <bitwise-or-expression2>
do
{
if( match( null, "|", SourceCodeTokenizer.TT_OPERATOR ) )
{
BitwiseOrExpression e = new BitwiseOrExpression();
Expression lhs = popExpression();
lhs = ensureOperandIntOrLongOrBoolean( lhs );
parseBitwiseXorExpression();
Expression rhs = popExpression();
if( lhs.getType() == JavaTypes.pBOOLEAN() )
{
rhs = ensureOperandBoolean( rhs );
}
else
{
rhs = ensureOperandIntOrLong( rhs );
}
rhs = possiblyWrapWithImplicitCoercion( rhs, lhs.getType() );
e.setLHS( lhs );
e.setRHS( rhs );
if( lhs.getType() == JavaTypes.pBOOLEAN() )
{
e.setType( lhs.getType() );
}
else
{
e.setType( resolveTypeForArithmeticExpression( e, lhs.getType(), "|", rhs.getType() ) );
}
pushExpression( e );
}
else
{
// The <null> case
break;
}
setLocation( iOffset, iLineNum, iColumn );
}
while( true );
}
private Expression ensureOperandIntOrLongOrBoolean( Expression op )
{
IType opType = op.getType();
if( verify( op, isPrimitiveOrBoxedIntegerType(opType) || opType == JavaTypes.BOOLEAN() || opType == JavaTypes.pBOOLEAN(), Res.MSG_BITWISE_OPERAND_MUST_BE_INT_OR_LONG ) )
{
opType = opType == JavaTypes.LONG() || opType == JavaTypes.pLONG()
? JavaTypes.pLONG()
: opType == JavaTypes.BOOLEAN() || opType == JavaTypes.pBOOLEAN()
? JavaTypes.pBOOLEAN()
: JavaTypes.pINT();
op = possiblyWrapWithImplicitCoercion( op, opType );
}
return op;
}
private boolean isPrimitiveOrBoxedIntegerType(IType type) {
return type == JavaTypes.LONG() || type == JavaTypes.pLONG() ||
type == JavaTypes.CHARACTER() || type == JavaTypes.pCHAR() ||
type == JavaTypes.INTEGER() || type == JavaTypes.pINT() ||
type == JavaTypes.SHORT() || type == JavaTypes.pSHORT() ||
type == JavaTypes.BYTE() || type == JavaTypes.pBYTE();
}
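// Operand rules enforced by the helpers above (illustrative, hypothetical snippets):
//
//   1 | 2           // int | int       -> int
//   1L | 2          // long | int      -> rhs coerced to the lhs type, result long
//   true | false    // boolean '|'/'^'/'&' is allowed and stays boolean
//   1.0 | 2         // error: Res.MSG_BITWISE_OPERAND_MUST_BE_INT_OR_LONG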
private Expression ensureOperandBoolean( Expression op )
{
IType opType = op.getType();
if( verify( op, opType == JavaTypes.BOOLEAN() || opType == JavaTypes.pBOOLEAN(), Res.MSG_CONDITIONAL_EXPRESSION_EXPECTS_BOOLEAN ) )
{
op = possiblyWrapWithImplicitCoercion( op, JavaTypes.pBOOLEAN() );
}
return op;
}
private Expression ensureOperandIntOrLong( Expression op )
{
IType opType = op.getType();
if( verify( op, isPrimitiveOrBoxedIntegerType(opType), Res.MSG_BITWISE_OPERAND_MUST_BE_INT_OR_LONG ) )
{
opType = opType == JavaTypes.LONG() || opType == JavaTypes.pLONG() ? JavaTypes.pLONG() : JavaTypes.pINT();
op = possiblyWrapWithImplicitCoercion( op, opType );
}
return op;
}
// bitwise-xor-expression
// <bitwise-and-expression>
// <bitwise-xor-expression> ^ <bitwise-and-expression>
// Left recursion removed is:
// bitwise-xor-expression
// <bitwise-and-expression> <bitwise-xor-expression2>
// bitwise-xor-expression2
// ^ <bitwise-and-expression>
// <null>
void parseBitwiseXorExpression()
{
int iOffset = _tokenizer.getTokenStart();
int iLineNum = _tokenizer.getLineNumber();
int iColumn = getTokenizer().getTokenColumn();
parseBitwiseAndExpression();
// <bitwise-xor-expression2>
do
{
if( match( null, "^", SourceCodeTokenizer.TT_OPERATOR ) )
{
BitwiseXorExpression e = new BitwiseXorExpression();
Expression lhs = popExpression();
lhs = ensureOperandIntOrLongOrBoolean( lhs );
parseBitwiseAndExpression();
Expression rhs = popExpression();
if( lhs.getType() == JavaTypes.pBOOLEAN() )
{
rhs = ensureOperandBoolean( rhs );
}
else
{
rhs = ensureOperandIntOrLong( rhs );
}
rhs = possiblyWrapWithImplicitCoercion( rhs, lhs.getType() );
e.setLHS( lhs );
e.setRHS( rhs );
if( lhs.getType() == JavaTypes.pBOOLEAN() )
{
e.setType( lhs.getType() );
}
else
{
e.setType( resolveTypeForArithmeticExpression( e, lhs.getType(), "^", rhs.getType() ) );
}
pushExpression( e );
}
else
{
// The <null> case
break;
}
setLocation( iOffset, iLineNum, iColumn );
}
while( true );
}
// bitwise-and-expression
// <equality-expression>
// <bitwise-and-expression> & <equality-expression>
// Left recursion removed is:
// bitwise-and-expression
// <equality-expression> <bitwise-and-expression2>
// bitwise-and-expression2
// & <equality-expression>
// <null>
void parseBitwiseAndExpression()
{
int iOffset = _tokenizer.getTokenStart();
int iLineNum = _tokenizer.getLineNumber();
int iColumn = getTokenizer().getTokenColumn();
parseEqualityExpression();
// <bitwise-and-expression2>
do
{
if( match( null, "&", SourceCodeTokenizer.TT_OPERATOR ) )
{
BitwiseAndExpression e = new BitwiseAndExpression();
Expression lhs = popExpression();
lhs = ensureOperandIntOrLongOrBoolean( lhs );
parseEqualityExpression();
Expression rhs = popExpression();
if( lhs.getType() == JavaTypes.pBOOLEAN() )
{
rhs = ensureOperandBoolean( rhs );
}
else
{
rhs = ensureOperandIntOrLong( rhs );
}
verifyComparable( lhs.getType(), rhs, true, true );
rhs = possiblyWrapWithImplicitCoercion( rhs, lhs.getType() );
e.setLHS( lhs );
e.setRHS( rhs );
if( lhs.getType() == JavaTypes.pBOOLEAN() )
{
e.setType( lhs.getType() );
}
else
{
e.setType( resolveTypeForArithmeticExpression( e, lhs.getType(), "&", rhs.getType() ) );
}
pushExpression( e );
}
else
{
// The <null> case
break;
}
setLocation( iOffset, iLineNum, iColumn );
}
while( true );
}
private void parseEqualityExpression()
{
int mark = getTokenizer().mark();
int locationsCount = _locations.size();
ContextType contextType = getContextType();
_parseEqualityExpression();
if( contextType != null &&
peekExpression().hasParseExceptions() &&
isConditional( peekExpression() ) )
{
// The boolean context type should have been applied to the conditional expression; it should not apply to its LHS operand.
// Since a conditional assumes boolean we can safely reparse without it.
backtrack( mark, locationsCount );
pushInferredContextTypes( ContextType.EMPTY );
try
{
_parseEqualityExpression();
}
finally
{
popInferredContextTypes();
}
}
}
private boolean isConditional( IExpression expression )
{
if( expression instanceof IConditionalExpression )
{
return true;
}
if( expression instanceof IParenthesizedExpression )
{
return isConditional( ((IParenthesizedExpression)expression).getExpression() );
}
return false;
}
private void _parseEqualityExpression()
{
int iOffset = _tokenizer.getTokenStart();
int iLineNum = _tokenizer.getLineNumber();
int iColumn = getTokenizer().getTokenColumn();
boolean matchOldNotEqOp = false;
parseRelationalExpression();
// <relational-expression2>
do
{
boolean bEq;
Token token = getTokenizer().getCurrentToken();
String value = token.getStringValue();
if( token.getType() == SourceCodeTokenizer.TT_OPERATOR && value != null &&
((bEq = value.equals( "===" )) ||
value.equals( "!==" )) )
{
getTokenizer().nextToken();
IdentityExpression e = new IdentityExpression();
Expression lhs = popExpression();
pushInferredContextTypes( new ContextType( lhs.getType() ) );
try
{
parseRelationalExpression();
}
finally
{
popInferredContextTypes();
}
Expression rhs = popExpression();
if( verify( lhs, !lhs.getType().isPrimitive() ||
JavaTypes.pVOID().isAssignableFrom( lhs.getType() ) ||
JavaTypes.STRING().isAssignableFrom( lhs.getType() ), Res.MSG_PRIMITIVES_NOT_ALLOWED_HERE ) &&
verify( rhs, !rhs.getType().isPrimitive() ||
JavaTypes.pVOID().isAssignableFrom( rhs.getType() ) ||
JavaTypes.STRING().isAssignableFrom( rhs.getType() ), Res.MSG_PRIMITIVES_NOT_ALLOWED_HERE ) )
{
verifyComparable( lhs.getType(), rhs, true, true );
}
e.setLHS( lhs );
e.setRHS( rhs );
e.setEquals( bEq );
pushExpression( e );
}
else if( token.getType() == SourceCodeTokenizer.TT_OPERATOR && value != null &&
((bEq = value.equals( "==" )) ||
value.equals( "!=" ) ||
(matchOldNotEqOp = value.equals( "<>" ))) )
{
getTokenizer().nextToken();
EqualityExpression e = new EqualityExpression();
Expression lhs = popExpression();
verify( e, !matchOldNotEqOp, Res.MSG_OBSOLETE_NOT_EQUAL_OP );
pushInferredContextTypes( new ContextType( lhs.getType() ) );
try
{
parseRelationalExpression();
}
finally
{
popInferredContextTypes();
}
Expression rhs = popExpression();
rhs = verifyConditionalTypes( lhs, rhs );
//## see PL-9512
verifyCoercionSymmetry( e, lhs, rhs );
e.setLHS( lhs );
e.setRHS( rhs );
e.setEquals( bEq );
warnOnSuspiciousEqualsOperator(e);
pushExpression( e );
}
else
{
// The <null> case
break;
}
setLocation( iOffset, iLineNum, iColumn );
}
while( true );
}
private void warnOnSuspiciousEqualsOperator( EqualityExpression e )
{
Expression lhs = e.getLHS();
Expression rhs = e.getRHS();
if( (isThisPointer( lhs ) && isEqualsArgReference( rhs )) ||
(isEqualsArgReference( lhs ) && isThisPointer( rhs )) )
{
warn( e, false, Res.MSG_WARN_ON_SUSPICIOUS_THIS_COMPARISON );
}
}
private boolean isThisPointer( Expression expr )
{
return expr != null &&
expr instanceof Identifier &&
((Identifier)expr).getSymbol() != null &&
Keyword.KW_this.getName().equals( ((Identifier)expr).getSymbol().getName() );
}
private boolean isEqualsArgReference( Expression expr )
{
if( isParsingFunction() && expr != null && expr instanceof Identifier )
{
FunctionType functionType = peekParsingFunction();
if( "equals".equals( functionType.getDisplayName() ) )
{
if( functionType.getParameterTypes().length == 1 &&
functionType.getParameterTypes()[0] == JavaTypes.OBJECT() )
{
ISymbol symbol = ((Identifier)expr).getSymbol();
if( symbol != null && functionType.getParameterNames()[0].equals( symbol.getName() ) )
{
return true;
}
}
}
}
return false;
}
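// Illustrative trigger for warnOnSuspiciousEqualsOperator (hypothetical Gosu class body):
//
//   override function equals( o : Object ) : boolean {
//     return this == o    // warned via Res.MSG_WARN_ON_SUSPICIOUS_THIS_COMPARISON;
//   }                     // an identity comparison ("===") is presumably what was intended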
private void verifyCoercionSymmetry( EqualityExpression e, Expression lhs, Expression rhs )
{
if( !lhs.hasParseExceptions() && !rhs.hasParseExceptions() )
{
ICoercionManager coercionManager = CommonServices.getCoercionManager();
boolean bDontWarn =
((lhs.getType() != JavaTypes.OBJECT() && rhs.getType() != JavaTypes.OBJECT()) || // neither side is Object, or
(lhs.getType() == JavaTypes.pVOID() || rhs.getType() == JavaTypes.pVOID()) || // one side is "null", or
(lhs.getType() != null && BeanAccess.isBeanType( lhs.getType() ) && // both sides are "beans"
rhs.getType() != null && BeanAccess.isBeanType( rhs.getType() )) ||
coercionManager.resolveCoercerStatically( lhs.getType(), rhs.getType() ) == // coercer is symmetric
coercionManager.resolveCoercerStatically( rhs.getType(), lhs.getType() ));
verifyOrWarn( e, bDontWarn, true,
Res.MSG_ASYMMETRICAL_COMPARISON, lhs.getType(), rhs.getType() );
}
}
private void verifyRelationalOperandsComparable( BinaryExpression expr )
{
if( !verify( expr, expr.getRHS().getType() != JavaTypes.OBJECT(), Res.MSG_RELATIONAL_OPERATOR_CANNOT_BE_APPLIED_TO_TYPE, expr.getOperator(), Object.class.getName() ) )
{
return;
}
boolean bComparable = false;
IType lhsType = expr.getLHS().getType();
IType rhsType = expr.getRHS().getType();
if( BeanAccess.isNumericType( lhsType ) )
{
if( (JavaTypes.IDIMENSION().isAssignableFrom( lhsType ) && isFinalDimension( this, lhsType, expr ) ||
JavaTypes.IDIMENSION().isAssignableFrom( rhsType ) && isFinalDimension( this, rhsType, expr )) &&
lhsType != rhsType )
{
// Operands must both be Dimensions for comparison
addError( expr, Res.MSG_DIMENSION_ADDITION_MUST_BE_SAME_TYPE );
return;
}
else
{
bComparable = true;
}
}
else if( lhsType == GosuParserTypes.DATETIME_TYPE() )
{
bComparable = true;
}
else
{
if( BeanAccess.isBeanType( lhsType ) )
{
if( BeanAccess.isBeanType( rhsType ) )
{
if( lhsType.isAssignableFrom( rhsType ) )
{
if( JavaTypes.COMPARABLE().isAssignableFrom( lhsType ) )
{
bComparable = true;
}
}
}
}
if( !bComparable &&
(JavaTypes.CHAR_SEQUENCE().isAssignableFrom( lhsType ) ||
JavaTypes.CHAR_SEQUENCE().isAssignableFrom( rhsType )) )
{
bComparable = true;
}
if( !bComparable )
{
bComparable = lhsType.isDynamic() || rhsType.isDynamic();
}
}
verify( expr, bComparable, Res.MSG_RELATIONAL_OPERATOR_CANNOT_BE_APPLIED_TO_TYPE, expr.getOperator(), lhsType );
}
private Expression verifyConditionalTypes( Expression lhs, Expression rhs )
{
IType lhsType = lhs.getType();
IType rhsType = rhs.getType();
if( lhsType.isPrimitive() && !(lhs instanceof NullExpression) && rhs instanceof NullExpression ||
rhsType.isPrimitive() && !(rhs instanceof NullExpression) && lhs instanceof NullExpression )
{
rhs.addParseException( new ParseException( makeFullParserState(), lhsType, Res.MSG_RELATIONAL_OPERATOR_CANNOT_BE_APPLIED_TO_TYPE, "", JavaTypes.pVOID().getName() ) );
return rhs;
}
if( areMetaTypes( lhsType ,rhsType ) )
{
verify( rhs, TypeSystem.canCast( lhsType, rhsType ), Res.MSG_TYPE_MISMATCH, lhsType.getName(), rhsType.getName() );
return rhs;
}
IType numberType = ParserBase.resolveType(lhsType, '>', rhsType);
if( numberType instanceof ErrorType ||
JavaTypes.IDIMENSION().isAssignableFrom( lhsType ) ||
JavaTypes.IDIMENSION().isAssignableFrom( rhsType ) )
{
Expression wrappedRhs = verifyWithComparableDimension( rhs, lhsType );
if( wrappedRhs != null )
{
return wrappedRhs;
}
// Not a number, verify types the general way
verifyComparable( lhs.getType(), rhs, true, true );
}
else
{
// Get coercion warnings if any
verifyComparable( numberType, rhs, false, true );
verifyComparable( numberType, lhs, false, true );
}
return rhs;
}
private boolean areMetaTypes( IType lhsType, IType rhsType )
{
return (lhsType instanceof IMetaType || lhsType instanceof ITypeVariableType) &&
(rhsType instanceof IMetaType || rhsType instanceof ITypeVariableType);
}
private Expression verifyWithComparableDimension( Expression rhs, IType lhsType )
{
if( JavaTypes.IDIMENSION().isAssignableFrom( lhsType ) &&
JavaTypes.COMPARABLE().isAssignableFrom( lhsType ) )
{
for( IType iface : lhsType.getInterfaces() )
{
if( JavaTypes.COMPARABLE().isAssignableFrom( iface.getGenericType() ) )
{
IType type = iface.getTypeParameters()[0];
verifyComparable( type, rhs, true, true );
if( !rhs.hasParseExceptions() )
{
rhs = possiblyWrapWithImplicitCoercion( rhs, type );
}
return rhs;
}
}
}
return null;
}
// relational-expression
// <bitshift-expression>
// <relational-expression> < <bitshift-expression>
// <relational-expression> > <bitshift-expression>
// <relational-expression> <= <bitshift-expression>
// <relational-expression> >= <bitshift-expression>
// <relational-expression> typeis <type-literal>
// <relational-expression> as <type-literal>
// Left recursion removed from this *grammar* is:
// relational-expression
// <bitshift-expression> <relational-expression2>
// relational-expression2
// < <bitshift-expression>
// > <bitshift-expression>
// <= <bitshift-expression>
// >= <bitshift-expression>
// typeis <type-literal>
// as <type-literal>
// <null>
void parseRelationalExpression()
{
int iOffset = _tokenizer.getTokenStart();
int iLineNum = _tokenizer.getLineNumber();
int iColumn = getTokenizer().getTokenColumn();
// <bitshift-expression>
parseIntervalExpression();
// <relational-expression2>
do
{
Token token = getTokenizer().getCurrentToken();
String value = token.getStringValue();
if( token.getType() == SourceCodeTokenizer.TT_OPERATOR && value != null &&
(value.equals( "<" ) ||
value.equals( ">" ) ||
value.equals( "<=" )) )
{
getTokenizer().nextToken();
if( value.equals( ">" ) && match( null, "=", SourceCodeTokenizer.TT_OPERATOR, true ) )
{
if( token.getTokenEnd() == getTokenizer().getCurrentToken().getTokenStart() )
{
value = ">=";
match( null, "=", SourceCodeTokenizer.TT_OPERATOR );
}
}
parseIntervalExpression();
RelationalExpression e = new RelationalExpression();
Expression rhs = popExpression();
Expression lhs = popExpression();
rhs = verifyConditionalTypes( lhs, rhs );
e.setLHS( lhs );
e.setRHS( rhs );
e.setOperator( value );
if( !lhs.hasParseExceptions() && !rhs.hasParseExceptions() )
{
verifyRelationalOperandsComparable( e );
}
pushExpression( e );
}
else if( match( null, Keyword.KW_typeis ) )
{
parseTypeLiteral();
TypeIsExpression e = new TypeIsExpression();
Expression rhs = popExpression();
Expression lhs = popExpression();
if( verify( rhs, rhs instanceof TypeLiteral, Res.MSG_EXPECTING_TYPELITERAL ) )
{
verify( lhs, !lhs.getType().isPrimitive(), Res.MSG_PRIMITIVES_NOT_ALLOWED_HERE );
IType rhsType = ((TypeLiteral)rhs).getType().getType();
verify( rhs, !rhsType.isPrimitive(), Res.MSG_PRIMITIVES_NOT_ALLOWED_HERE );
verify( rhs, TypeLoaderAccess.instance().canCast( lhs.getType(), rhsType ), Res.MSG_INCONVERTIBLE_TYPES, lhs.getType().getName(), rhsType.getName() );
verifyTypeVarAreReified( rhs, rhsType );
e.setRHS( (TypeLiteral)rhs );
_ctxInferenceMgr.updateType( ContextInferenceManager.getUnwrappedExpression( lhs ), e.getRHS().evaluate() );
}
e.setLHS( lhs );
pushExpression( e );
}
else
{
// The <null> case
break;
}
setLocation( iOffset, iLineNum, iColumn );
}
while( true );
}
// interval-expression
// <bitshift-expression>
// <interval-expression> .. <bitshift-expression>
// <interval-expression> |.. <bitshift-expression>
// <interval-expression> ..| <bitshift-expression>
// <interval-expression> |..| <bitshift-expression>
// Left recursion removed from this *grammar* is:
// interval-expression
// <bitshift-expression> <interval-expression2>
// interval-expression2
// .. <bitshift-expression> <interval-expression2>
// |.. <bitshift-expression> <interval-expression2>
// ..| <bitshift-expression> <interval-expression2>
// |..| <bitshift-expression> <interval-expression2>
// <null>
void parseIntervalExpression()
{
int iOffset = _tokenizer.getTokenStart();
int iLineNum = _tokenizer.getLineNumber();
int iColumn = getTokenizer().getTokenColumn();
_parseIntervalExpression();
setLocation( iOffset, iLineNum, iColumn );
}
void _parseIntervalExpression()
{
// <bitshift-expression>
parseBitshiftExpression();
// <interval-expression2>
Token token = getTokenizer().getCurrentToken();
int mark = getTokenizer().mark();
String value = token.getStringValue();
boolean bOperator = token.getType() == SourceCodeTokenizer.TT_OPERATOR;
boolean bClosed = bOperator && "..".equals( value );
boolean bLeftOpen = !bClosed && bOperator && "|..".equals( value );
boolean bNextTokenIsDotNoWhitespace = false;
if( bClosed || bLeftOpen ) {
getTokenizer().nextToken();
Token dotToken = getTokenizer().getTokenAt( mark + 1 );
if( bNextTokenIsDotNoWhitespace = dotToken != null && dotToken.getType() == '.' )
{
getTokenizer().nextToken();
}
}
boolean bRightOpen = !bClosed && !bLeftOpen && bOperator && "..|".equals( value );
if( bRightOpen )
{
getTokenizer().nextToken();
}
else if( !bClosed && !bLeftOpen && !bRightOpen && bOperator && "|..|".equals( value ) )
{
getTokenizer().nextToken();
bLeftOpen = bRightOpen = true;
}
if( bClosed || bLeftOpen || bRightOpen )
{
parseBitshiftExpression();
Expression rhs = popExpression();
Expression lhs = popExpression();
rhs = verifyConditionalTypes( lhs, rhs );
IType type = IntervalExpression.getIntervalType( rhs.getType() );
verifyComparable( rhs.getType(), lhs );
if( !lhs.hasImmediateParseIssue( Res.MSG_IMPLICIT_COERCION_ERROR ) &&
!lhs.hasImmediateParseIssue( Res.MSG_TYPE_MISMATCH ) )
{
lhs = possiblyWrapWithImplicitCoercion( lhs, rhs.getType() );
}
else
{
//noinspection ThrowableResultOfMethodCallIgnored
lhs.removeParseException( Res.MSG_IMPLICIT_COERCION_ERROR );
//noinspection ThrowableResultOfMethodCallIgnored
lhs.removeParseException( Res.MSG_TYPE_MISMATCH );
type = IntervalExpression.getIntervalType( lhs.getType() );
verifyComparable( lhs.getType(), rhs );
rhs = possiblyWrapWithImplicitCoercion( rhs, lhs.getType() );
}
IntervalExpression e = new IntervalExpression( bClosed || !bLeftOpen, bClosed || !bRightOpen, lhs, rhs );
verify( e, !bNextTokenIsDotNoWhitespace, Res.MSG_EXTRA_DOT_FOUND_IN_INTERVAL );
//## todo: move to foreach: verify( e, JavaTypes.ITERABLE().isAssignableFrom( type ), Res.MSG_INTERVAL_MUST_BE_ITERABLE_HERE );
e.setType( type );
if( !lhs.hasParseExceptions() && !rhs.hasParseExceptions() )
{
verifyRelationalOperandsComparable( e );
}
pushExpression(e);
}
}
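// Interval forms recognized above (illustrative):
//
//   1..10       // closed on both ends
//   1|..10      // open on the left
//   1..|10      // open on the right
//   1|..|10     // open on both ends
//
// An extra '.' immediately following the operator (e.g. "1...10") is reported via
// Res.MSG_EXTRA_DOT_FOUND_IN_INTERVAL.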
// bitshift-expression
// <additive-expression>
// <bitshift-expression> << <additive-expression>
// <bitshift-expression> >> <additive-expression>
// <bitshift-expression> >>> <additive-expression>
// Left recursion removed from this *grammar* is:
// bitshift-expression
// <additive-expression> <bitshift-expression2>
// bitshift-expression2
// << <additive-expression> <bitshift-expression2>
// >> <additive-expression> <bitshift-expression2>
// >>> <additive-expression> <bitshift-expression2>
// <null>
void parseBitshiftExpression()
{
Token token = _tokenizer.getCurrentToken();
int iOffset = token.getTokenStart();
int iLineNum = token.getLine();
int iColumn = token.getTokenColumn();
// <additive-expression>
parseAdditiveExpression();
// <bitshift-expression2>
do
{
token = _tokenizer.getCurrentToken();
boolean bLeftShift;
if( (bLeftShift = (SourceCodeTokenizer.TT_OPERATOR == token.getType() && "<<".equals( token.getStringValue() ))) || matchRightShift() )
{
Token T = new Token();
if( bLeftShift )
{
match( T, "<<", SourceCodeTokenizer.TT_OPERATOR );
}
if( T._strValue == null || !T._strValue.equals( "<<" ) )
{
T._strValue = ">>";
Token gr = new Token();
int lastEnd = getTokenizer().getPriorToken().getTokenEnd();
if( match( gr, ">", SourceCodeTokenizer.TT_OPERATOR, true ) )
{
if( lastEnd == gr.getTokenStart() )
{
T._strValue = ">>>";
match( null, ">", SourceCodeTokenizer.TT_OPERATOR );
}
}
}
parseAdditiveExpression();
BitshiftExpression e = new BitshiftExpression();
Expression rhs = popExpression();
// Lhs must be an int or a long
Expression lhs = popExpression();
lhs = ensureOperandIntOrLong( lhs );
// Rhs must be an int
rhs = ensureOperandIntOrLong( rhs );
rhs = possiblyWrapWithImplicitCoercion( rhs, JavaTypes.pINT() );
e.setLHS( lhs );
e.setRHS( rhs );
e.setOperator( T._strValue );
e.setType( lhs.getType() );
pushExpression( e );
}
else
{
// The <null> case
break;
}
setLocation( iOffset, iLineNum, iColumn );
}
while( true );
}
private boolean matchRightShift()
{
SourceCodeTokenizer tokenizer = getTokenizer();
int mark = tokenizer.mark();
Token t = tokenizer.getTokenAt( mark );
if( t != null && t.getType() == SourceCodeTokenizer.TT_OPERATOR && ">".equals( t.getStringValue() ) )
{
t = tokenizer.getTokenAt( mark + 1 );
if( t != null && t.getType() == SourceCodeTokenizer.TT_OPERATOR && ">".equals( t.getStringValue() ) )
{
tokenizer.nextToken();
tokenizer.nextToken();
return true;
}
}
return false;
}
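// The ">>" and ">>>" operators are matched as adjacent '>' tokens rather than as single
// operator tokens, presumably so nested generic type literals such as List<List<String>>
// can still be closed one '>' at a time. Illustrative (hypothetical snippets):
//
//   x >> 2     // two adjacent '>' tokens  -> operator ">>"
//   x >>> 2    // a third adjacent '>'     -> operator becomes ">>>"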
// additive-expression
// <multiplicative-expression>
// <additive-expression> + <multiplicative-expression>
// <additive-expression> - <multiplicative-expression>
// Left recursion removed from this *grammar* is:
// additive-expression
// <multiplicative-expression> <additive-expression2>
// additive-expression2
// + <multiplicative-expression> <additive-expression2>
// - <multiplicative-expression> <additive-expression2>
// <null>
void parseAdditiveExpression()
{
int iOffset = _tokenizer.getTokenStart();
int iLineNum = _tokenizer.getLineNumber();
int iColumn = getTokenizer().getTokenColumn();
// <multiplicative-expression>
parseMultiplicativeExpression();
// <additive-expression2>
do
{
Token token = getTokenizer().getCurrentToken();
String value = token.getStringValue();
boolean bOperator = token.getType() == SourceCodeTokenizer.TT_OPERATOR && value != null;
boolean bPlus = bOperator &&
(value.equals( "+" ) ||
value.equals( "?+" ) ||
value.equals( "!+" ));
boolean bMinus = !bPlus && bOperator &&
(value.equals( "-" ) ||
value.equals( "?-" ) ||
value.equals( "!-" ));
if( bPlus || bMinus )
{
getTokenizer().nextToken();
parseMultiplicativeExpression();
AdditiveExpression e = new AdditiveExpression();
Expression rhs = popExpression();
Expression lhs = popExpression();
e.setLHS( lhs );
e.setRHS( rhs );
e.setOperator( value );
IType type = resolveTypeForArithmeticExpression( e, lhs.getType(), bPlus ? "+" : "-", rhs.getType() );
e.setType( type );
verify( e, TypeSystem.isNumericType( type ) || value.charAt(0) != '!', Res.MSG_ARITHMETIC_OPERATOR_CANNOT_BE_APPLIED_TO_TYPES, value, lhs.getType().getDisplayName(), rhs.getType().getDisplayName() );
verify( e, !(e.isNullSafe() && e.getType().isPrimitive()), Res.MSG_EXPECTING_REFERENCE_TYPE );
pushExpression( e );
}
else
{
// The <null> case
break;
}
setLocation( iOffset, iLineNum, iColumn );
}
while( true );
}
// multiplicative-expression
// <unary-expression>
// <multiplicative-expression> * <unary-expression>
// <multiplicative-expression> / <unary-expression>
// <multiplicative-expression> % <unary-expression>
// NOTE: See parseAdditiveExpression() above for an explanation of left recursion removal.
void parseMultiplicativeExpression()
{
int iOffset = _tokenizer.getTokenStart();
int iLineNum = _tokenizer.getLineNumber();
int iColumn = getTokenizer().getTokenColumn();
parseTypeAsExpression();
do
{
Token token = getTokenizer().getCurrentToken();
String value = token.getStringValue();
if( token.getType() == SourceCodeTokenizer.TT_OPERATOR && value != null &&
(value.equals( "*" ) ||
value.equals( "/" ) ||
value.equals( "%" ) ||
value.equals( "?*" ) ||
value.equals( "!*" ) ||
value.equals( "?/" ) ||
value.equals( "?%" )) )
{
getTokenizer().nextToken();
parseTypeAsExpression();
MultiplicativeExpression e = new MultiplicativeExpression();
Expression rhs = popExpression();
Expression lhs = popExpression();
e.setLHS( lhs );
e.setRHS( rhs );
e.setOperator( value );
IType type = resolveTypeForArithmeticExpression( e, lhs.getType(), value, rhs.getType() );
e.setType( type );
verify( e, TypeSystem.isNumericType( type ) || value.charAt(0) != '!', Res.MSG_ARITHMETIC_OPERATOR_CANNOT_BE_APPLIED_TO_TYPES, "!*", lhs.getType().getDisplayName(), rhs.getType().getDisplayName() );
verify( e, !(e.isNullSafe() && e.getType().isPrimitive()), Res.MSG_EXPECTING_REFERENCE_TYPE );
pushExpression( e );
}
else
{
// The <null> case
break;
}
setLocation( iOffset, iLineNum, iColumn );
}
while( true );
}
void parseTypeAsExpression()
{
int iOffset = _tokenizer.getTokenStart();
int iLineNum = _tokenizer.getLineNumber();
int iColumn = getTokenizer().getTokenColumn();
parseUnaryExpression();
do
{
if( match( null, Keyword.KW_typeas ) || match( null, Keyword.KW_as ))
{
parseTypeLiteral();
TypeAsExpression e = new TypeAsExpression();
Expression rhs = popExpression();
Expression lhs = popExpression();
if( !(rhs instanceof TypeLiteral) )
{
rhs.addParseException( new ParseException( makeFullParserState(), Res.MSG_EXPECTING_TYPELITERAL ) );
}
else
{
IType rhsType = ((TypeLiteral)rhs).getType().getType();
checkComparableAndCastable( lhs, rhs );
e.setType( rhsType );
e.setCoercer( CommonServices.getCoercionManager().resolveCoercerStatically( rhsType, lhs.getType() ) );
verifyTypeVarAreReified( rhs, rhsType );
warn( lhs, lhs.getType() instanceof IErrorType ||
rhs.getType() instanceof IErrorType ||
!rhsType.isAssignableFrom( lhs.getType() ) ||
rhsType.isDynamic(),
Res.MSG_UNNECESSARY_COERCION, lhs.getType().getRelativeName(), rhsType.getRelativeName() );
}
e.setLHS( lhs );
pushExpression( e );
}
else
{
// The <null> case
break;
}
setLocation( iOffset, iLineNum, iColumn );
}
while( true );
}
private boolean checkComparableAndCastable(Expression lhs, Expression rhs )
{
IType rhsType = ((TypeLiteral)rhs).getType().getType();
verify( rhs, rhsType != JavaTypes.pVOID(), Res.MSG_VOID_NOT_ALLOWED );
verifyComparable( TypeLord.replaceTypeVariableTypeParametersWithBoundingTypes( rhsType ), lhs, false, false );
boolean hasExceptions = rhs.hasParseExceptions() || lhs.hasParseExceptions();
if( hasExceptions &&
(!(lhs instanceof TypeLiteral) ||
((TypeLiteral)lhs).getType().getType() instanceof TypeVariableType ||
!(rhsType instanceof IGosuClass && ((IGosuClass) rhsType).isStructure())) )
{
IType lhsType = lhs.getType();
if( TypeSystem.canCast( lhsType, rhsType ) )
{
//noinspection ThrowableResultOfMethodCallIgnored
lhs.removeParseException( Res.MSG_TYPE_MISMATCH );
hasExceptions = false;
}
}
return !hasExceptions;
}
// unary-expression
// + <unary-expression>
// - <unary-expression>
// <unary-expression-not-plus-minus>
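//
// Note: when a "-" (or "!-") is immediately followed by a number literal, the sign is folded
// into the literal itself rather than producing a UnaryExpression, e.g. "-5" parses as a single
// negative NumberLiteral; otherwise the operand is wrapped in a UnaryExpression, and byte/short/
// char operands are widened to int (their boxed variants to Integer) before negation.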
void parseUnaryExpression()
{
int iOffset = _tokenizer.getTokenStart();
int iLineNum = _tokenizer.getLineNumber();
int iColumn = getTokenizer().getTokenColumn();
Token token = getTokenizer().getCurrentToken();
String value = token.getStringValue();
if( token.getType() == SourceCodeTokenizer.TT_OPERATOR && value != null &&
(value.equals( "+" ) ||
value.equals( "-" ) ||
value.equals( "!-" )) )
{
getTokenizer().nextToken();
boolean unchecked = "!-".equals( value );
boolean negation = value.charAt( 0 ) == '-' || unchecked;
token = getTokenizer().getCurrentToken();
if( negation && atNumberLiteralStart() )
{
parseNumberLiteral( token, true );
setLocation( iOffset, iLineNum, iColumn );
parseBindingExpression( token );
}
else
{
parseUnaryExpressionNotPlusMinus();
UnaryExpression ue = new UnaryExpression();
Expression e = popExpression();
IType type = e.getType();
verify( e, ue.isSupportedType( type ), Res.MSG_NUMERIC_TYPE_EXPECTED );
ue.setNegated( negation );
ue.setUnchecked( unchecked );
if( negation )
{
if( type == JavaTypes.pCHAR() || type == JavaTypes.pBYTE() || type == JavaTypes.pSHORT() )
{
e = possiblyWrapWithCoercion( e, JavaTypes.pINT(), true );
}
else if( type == JavaTypes.CHARACTER() || type == JavaTypes.BYTE() || type == JavaTypes.SHORT() )
{
e = possiblyWrapWithCoercion( e, JavaTypes.INTEGER(), true );
}
}
ue.setExpression( e );
ue.setType( e.getType() );
pushExpression( ue );
}
}
else
{
parseUnaryExpressionNotPlusMinus();
}
setLocation( iOffset, iLineNum, iColumn );
}
// unary-expression-not-plus-minus
// ~ <unary-expression>
// ! <unary-expression>
// not <unary-expression>
// typeof <unary-expression>
// eval( <expression> )
// <primary-expression>
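//
// Illustrative forms handled below (Gosu source, not from this file):
//   !done  or  not done        -- logical not, operand coerced to boolean
//   ~mask                      -- bitwise not, operand must be an int or a long
//   typeof x / statictypeof x  -- TypeOfExpression vs. StaticTypeOfExpression over the operand
//   \ x -> x + 1               -- "\" starts a block expression
//   eval( "1 + 2" )            -- eval expression; anything else falls through to <primary-expression>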
void parseUnaryExpressionNotPlusMinus()
{
int iOffset = _tokenizer.getTokenStart();
int iLineNum = _tokenizer.getLineNumber();
int iColumn = getTokenizer().getTokenColumn();
_parseUnaryExpressionNotPlusMinus();
setLocation( iOffset, iLineNum, iColumn );
}
private void checkMemberAccessIsReadable()
{
Expression expr = peekExpression();
if( expr instanceof MemberAccess )
{
IPropertyInfo pi = ((MemberAccess)expr).getPropertyInfoWithoutThrowing();
if( pi != null )
{
verify( expr, pi.isReadable( getGosuClass() ), Res.MSG_CLASS_PROPERTY_NOT_READABLE, pi.getName(), pi.getOwnersType().getName() );
}
}
else if( (expr instanceof Identifier &&
((Identifier)expr).getSymbol() instanceof DynamicPropertySymbol) )
{
DynamicPropertySymbol dps = (DynamicPropertySymbol)((Identifier)expr).getSymbol();
if( dps != null && dps.getPropertyInfo() != null && !dps.getPropertyInfo().isReadable( getGosuClass() ) )
{
verify( expr, false, Res.MSG_CLASS_PROPERTY_NOT_READABLE, dps.getName(), dps.getScriptPart() == null ? "" : dps.getScriptPart().getContainingType().getName() );
}
}
}
void _parseUnaryExpressionNotPlusMinus()
{
if( match( null, "!", SourceCodeTokenizer.TT_OPERATOR ) || match( null, Keyword.KW_not ) )
{
_ctxInferenceMgr.pushCtx();
try
{
parseUnaryExpression();
}
finally
{
_ctxInferenceMgr.popCtx( false );
}
UnaryNotPlusMinusExpression ue = new UnaryNotPlusMinusExpression();
Expression e = popExpression();
IType type = e.getType();
verify( e, type == JavaTypes.pBOOLEAN() || type == JavaTypes.BOOLEAN() || type.isDynamic(), Res.MSG_TYPE_MISMATCH, "boolean", type.getDisplayName() );
e = possiblyWrapWithImplicitCoercion( e, JavaTypes.pBOOLEAN() );
ue.setExpression( e );
ue.setNot( true );
ue.setType( JavaTypes.pBOOLEAN() );
pushExpression( ue );
}
else if( match( null, "~", SourceCodeTokenizer.TT_OPERATOR ) )
{
pushInferredContextTypes( ContextType.EMPTY );
parseUnaryExpression();
popInferredContextTypes();
UnaryNotPlusMinusExpression ue = new UnaryNotPlusMinusExpression();
Expression e = popExpression();
IType type = e.getType();
if( type == JavaTypes.LONG() || type == JavaTypes.pLONG() )
{
e = possiblyWrapWithImplicitCoercion( e, JavaTypes.pLONG() );
}
else
{
if( verify( e, !type.isDynamic(), Res.MSG_DYNAMIC_TYPE_NOT_ALLOWED_HERE ) )
{
e = ensureOperandIntOrLong( e );
}
}
ue.setExpression( e );
ue.setBitNot( true );
ue.setType( e.getType() );
pushExpression( ue );
}
else if( match( null, Keyword.KW_typeof ) )
{
parseUnaryExpression();
TypeOfExpression toe = new TypeOfExpression();
Expression e = popExpression();
toe.setExpression( e );
pushExpression( toe );
}
else if( match( null, Keyword.KW_statictypeof ) )
{
parseUnaryExpression();
StaticTypeOfExpression toe = new StaticTypeOfExpression();
Expression e = popExpression();
toe.setExpression( e );
pushExpression( toe );
}
else if( match( null, "\\", SourceCodeTokenizer.TT_OPERATOR ) )
{
parseBlockExpression();
}
else if( match( null, Keyword.KW_eval ) )
{
parseEvalExpression();
}
else
{
parsePrimaryExpression();
}
}
private void parseEvalExpression()
{
EvalExpression evalExpr = new EvalExpression( getTypeUsesMap().copy() );
List<ICapturedSymbol> captured = new ArrayList<>();
captureAllSymbols( null, getCurrentEnclosingGosuClass(), captured );
evalExpr.setCapturedSymbolsForBytecode( captured );
evalExpr.setCapturedTypeVars( new HashMap<>( getTypeVariables() ) );
verify( evalExpr, match( null, '(' ), Res.MSG_EXPECTING_LEFTPAREN_EVAL );
parseExpression();
verify( evalExpr, match( null, ')' ), Res.MSG_EXPECTING_RIGHTPAREN_EVAL );
Expression e = popExpression();
evalExpr.setExpression( e );
pushExpression( evalExpr );
}
public boolean isCaptureSymbolsForEval()
{
return _bCaptureSymbolsForEval;
}
public void setCaptureSymbolsForEval( boolean bCaptureSymbolsForEval )
{
_bCaptureSymbolsForEval = bCaptureSymbolsForEval;
}
// primary-expression
// <new-expression>
// <block-expression>
// <member-access>
// <array-access>
// ( <expression> )
// <bindable-expression>
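//
// Dispatch (see _parsePrimaryExpression): the "block" keyword goes to _parseBlockLiteral, "new"
// starts a new-expression, a bare "{" is tried as a stand-alone data structure initialization,
// and anything else is parsed as a bindable expression. After the primary parses, a Java-style
// cast such as "(String)x" is recovered into an implicit typeas, and any trailing member/array
// access is parsed via parseIndirectMemberAccess().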
void parsePrimaryExpression()
{
final Token token = _tokenizer.getCurrentToken();
int iOffset = token.getTokenStart();
int iLineNum = token.getLine();
int iColumn = token.getTokenColumn();
boolean bForceRedundancy = _parsePrimaryExpression( token );
setLocation( iOffset, iLineNum, iColumn, bForceRedundancy );
Expression eas = peekExpression();
if( recoverFromJavaStyleCast( eas ) )
{
setLocation( iOffset, iLineNum, iColumn, bForceRedundancy );
// re-root the parenthesized expression under the implicit typeas we created
Expression implicitTypeAsFromRecovery = peekExpression();
getLocationsList().remove(eas.getLocation());
implicitTypeAsFromRecovery.getLocation().addChild(eas.getLocation());
eas.setParent( implicitTypeAsFromRecovery );
}
parseIndirectMemberAccess( iOffset, iLineNum, iColumn );
checkMemberAccessIsReadable();
}
boolean _parsePrimaryExpression( Token token )
{
boolean bRet = false;
if( Keyword.KW_block == token.getKeyword() )
{
getTokenizer().nextToken();
_parseBlockLiteral();
}
else if( Keyword.KW_new == token.getKeyword() )
{
getTokenizer().nextToken();
parseNewExpression();
}
else if( parseStandAloneDataStructureInitialization( token ) )
{
bRet = true;
}
else
{
parseBindableExpression( token );
}
return bRet;
}
// bindable-expression
// <simple-expression> [<binder-expression>]
// simple-expression
// ( <expression> )
// <name>
// <method-call>
// <literal>
// binder-expression
// <postfix-binder-expression> [<binder-expression>]
// <prefix-binder-expression> [<binder-expression>]
// <null>
// prefix-binder-expression
// <bindable-expression>
// <null>
// postfix-binder-expression
// <unit-expression>
// <bindable-expression>
// <null>
// unit-expression
// <unit-expression-factor>
// <unit-expression> * <unit-expression-factor>
// <unit-expression> / <unit-expression-factor>
// <null>
// unit-expression-factor
// ( <unit-expression> ) [<binder-expression>]
// <name> [<binder-expression>]
// <null>
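//
// A binding expression is two adjacent expressions on the same line where one side exposes a
// binder method: the Rhs implements IPostfixBinder#postfixBind( t ) with t assignable from the
// Lhs type, or the Lhs implements IPrefixBinder#prefixBind( t ) with t assignable from the Rhs
// type. An optional connector operator (one of the tokenizer's default binding operators) may
// appear between the two parts. Illustrative example, assuming suitable binder types exist on
// the library side: 90 mi/hr, where the unit expression "mi/hr" postfix-binds to the literal 90
// (see the precedence note in parsePostfixUnitBindingExpression).
//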
void parseBindableExpression()
{
parseBindableExpression( true );
}
void parseBindableExpression( boolean bPostfix )
{
final Token token = _tokenizer.getCurrentToken();
int iOffset = token.getTokenStart();
int iLineNum = token.getLine();
int iColumn = token.getTokenColumn();
parseBindableExpression( token, bPostfix );
setLocation( iOffset, iLineNum, iColumn, false );
}
private void parseBindableExpression( Token token )
{
parseBindableExpression( token, true );
}
private void parseBindableExpression( Token token, boolean bPostfix )
{
int iOffset = token.getTokenStart();
int iLineNum = token.getLine();
int iColumn = token.getTokenColumn();
if( '(' == token.getType() )
{
getTokenizer().nextToken();
parseExpressionNoVerify( isParenthesisTerminalExpression() ? getContextType() : ContextType.EMPTY );
_ctxInferenceMgr.restoreLastCtx();
Expression e = popExpression();
ParenthesizedExpression expr = new ParenthesizedExpression( e );
pushExpression( expr );
setLocation( iOffset, iLineNum, iColumn, false );
verify( e, match( null, ')' ), Res.MSG_EXPECTING_EXPRESSION_CLOSE );
}
else if( parseNameOrMethodCall( token ) )
{
setLocation( iOffset, iLineNum, iColumn, false );
}
else
{
parseLiteral( token );
}
// The Name, MethodCall, or Literal expression just parsed may be followed by another
// expression to form a BindingExpression. A BindingExpression consists of two adjacent
// expressions where either the Lhs expression implements IPrefixBinder#prefixBind( t )
// and t is assignable from the Rhs expression type, or the Rhs expression implements
// IPostfixBinder#postfixBind( t ) and t is assignable from the Lhs expression type.
parseBindingExpression( token, bPostfix );
}
private void parseBindingExpression( Token token )
{
parseBindingExpression( token, true );
}
private void parseBindingExpression( Token token, boolean bPostfix )
{
if( isInSeparateStringTemplateExpression() )
{
return;
}
int mark = getTokenizer().mark();
Token connector = getTokenizer().getCurrentToken();
String connectorString = null;
if( connector.getType() == ISourceCodeTokenizer.TT_OPERATOR &&
SourceCodeTokenizerInternal.getDefaultBindingOperators().contains( connector.getStringValue() ) )
{
connectorString = connector.getStringValue();
getTokenizer().nextToken();
}
//noinspection StatementWithEmptyBody
while( bPostfix
? parsePostfixUnitBindingExpression( token, connectorString ) ||
parsePrefixUnitBindingExpression( token, connectorString )
: parsePrefixUnitBindingExpression( token, connectorString ) ||
parsePostfixUnitBindingExpression( token, connectorString ) )
{
mark = getTokenizer().mark();
connector = getTokenizer().getCurrentToken();
connectorString = null;
if( connector.getType() == ISourceCodeTokenizer.TT_OPERATOR &&
SourceCodeTokenizerInternal.getDefaultBindingOperators().contains( connector.getStringValue() ) )
{
connectorString = connector.getStringValue();
getTokenizer().nextToken();
}
}
if( connectorString != null )
{
_tokenizer.restoreToMark( mark );
}
}
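// Caches, per (pure generic type name, method name) pair, whether the type declares a method
// with that name. Used to cheaply test for "prefixBind"/"postfixBind" before committing to the
// more expensive binder-expression parsing and backtracking above.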
private static final Cache<Pair<String, String>, Boolean> BINDER_ASSIGNABILITY_CACHE =
TypeSystemAwareCache.make( "Binder Assignability Cache", 1000, key -> hasMethod( TypeSystem.getByFullName( key.getFirst() ), key.getSecond() ) );
private static boolean hasMethod_Cached( IType type, String method )
{
if( !type.isPrimitive() && (type instanceof IGosuClass || type instanceof IJavaType) )
{
String name = TypeLord.getPureGenericType( type ).getName();
return BINDER_ASSIGNABILITY_CACHE.get( Pair.make( name, method ) );
}
return false;
}
private boolean parsePrefixUnitBindingExpression( Token priorToken, String connectorString )
{
Expression unitExpr = peekExpression();
if( unitExpr.hasParseExceptions() )
{
return false;
}
if( unitExprType( unitExpr ) instanceof IBlockType )
{
return false;
}
IType unitBinderGenType = GosuTypes.IPREFIX_BINDER();
if( !hasMethod_Cached( unitExprType( unitExpr ), "prefixBind" ) )
{
return false;
}
Token token = getTokenizer().getCurrentToken();
if( token.getLine() != priorToken.getLine() || token.getType() == ISourceCodeTokenizer.TT_EOF )
{
return false;
}
int mark = getTokenizer().mark();
int locationsCount = _locations.size();
parseBindableExpression( false );
Expression primExpr = popExpression();
IType unitBinderType;
do
{
IType unitBinderParamType = unitBinderGenType.getParameterizedType( primExpr.getType(), unitBinderGenType.getGenericTypeVariables()[1].getTypeVariableDefinition().getType() );
unitBinderType = TypeLord.findParameterizedStructureType( unitBinderParamType, unitExprType( unitExpr ) );
if( unitBinderType == null && primExpr instanceof BindingExpression )
{
primExpr = backtrackBinderExpr( locationsCount, primExpr );
}
else
{
break;
}
} while( true );
if( unitBinderType != null )
{
if( !primExpr.hasParseExceptions() && isConnectorStringValid( false, connectorString, unitExpr, unitBinderType ) )
{
popExpression();
IType bindForType = unitBinderType.getTypeParameters()[0];
IType resultType = unitBinderType.getTypeParameters()[1];
primExpr = possiblyWrapWithImplicitCoercion( primExpr, bindForType );
verifyComparable( bindForType, primExpr, true, true );
BindingExpression unitBindingExpr = new BindingExpression( unitExpr, primExpr, bindForType, resultType, mark, true );
pushExpression( unitBindingExpr );
setLocation( priorToken.getTokenStart(), priorToken.getLine(), priorToken.getTokenColumn(), true );
return true;
}
backtrack( mark, locationsCount, primExpr );
}
_tokenizer.restoreToMark( mark );
removeLocationsFrom( locationsCount );
return false;
}
private IType unitExprType( Expression unitExpr )
{
IType type = unitExpr.getType();
if( type.isPrimitive() )
{
type = TypeLord.getBoxedTypeFromPrimitiveType( type );
}
return type;
}
private Expression backtrackBinderExpr( int locationsCount, Expression primExpr )
{
int primMark = ((BindingExpression)primExpr).getMark();
backtrack( primMark, locationsCount, ((BindingExpression)primExpr).getRhsExpr() );
primExpr = ((BindingExpression)primExpr).getLhsExpr();
ParseTree primLocation = primExpr.getLocation();
primLocation.getParent().removeChild( primLocation );
_locations.add( primLocation );
return primExpr;
}
private boolean parsePostfixUnitBindingExpression( Token priorToken, String connectorString )
{
Token token = getTokenizer().getCurrentToken();
if( priorToken.getLine() != token.getLine() )
{
return false;
}
Expression lhsExpr = peekExpression();
if( lhsExpr.hasParseExceptions() )
{
return false;
}
if( lhsExpr.getType() instanceof IBlockType )
{
return false;
}
if( token.getType() == SourceCodeTokenizer.TT_WORD ||
token.getType() == SourceCodeTokenizer.TT_INTEGER ||
token.getType() == SourceCodeTokenizer.TT_NUMBER ||
token.getType() == '"' ||
token.getType() == '\'' ||
token.getType() == '(' )
{
int mark = getTokenizer().mark();
int locationsCount = _locations.size();
if( parseUnitExpression() &&
tryPostfixUnitExpr( mark, locationsCount, lhsExpr, priorToken, connectorString, true ) )
{
// Try the unit expression first because unit expressions have precedence
// e.g., 90 mi/hr should parse as (90 (mi/hr)), not ((90 mi)/hr)
return true;
}
parseBindableExpression();
return tryPostfixUnitExpr( mark, locationsCount, lhsExpr, priorToken, connectorString, true );
}
return false;
}
private boolean tryPostfixUnitExpr( int mark, int locationsCount, Expression lhsExpr, Token priorToken, String connectorString, boolean testConnector )
{
Expression unitExpr = popExpression();
IType unitBinderGenType = GosuTypes.IPOSTFIX_BINDER();
if( hasMethod_Cached( unitExprType( unitExpr ), "postfixBind" ) )
{
IType unitBinderType;
do
{
IType unitBinderParamType = unitBinderGenType.getParameterizedType( lhsExpr.getType(), unitBinderGenType.getGenericTypeVariables()[1].getTypeVariableDefinition().getType() );
unitBinderType = TypeLord.findParameterizedStructureType( unitBinderParamType, unitExprType( unitExpr ) );
if( unitBinderType == null && unitExpr instanceof BindingExpression )
{
unitExpr = backtrackBinderExpr( locationsCount, unitExpr );
}
else
{
if( unitBinderType != null && testConnector && !isConnectorStringValid( true, connectorString, unitExpr, unitBinderType ) )
{
unitBinderType = null;
}
break;
}
} while( true );
if( unitBinderType != null )
{
if( !lhsExpr.hasParseExceptions() )
{
popExpression();
IType bindForType = unitBinderType.getTypeParameters()[0];
IType resultType = unitBinderType.getTypeParameters()[1];
lhsExpr = possiblyWrapWithImplicitCoercion( lhsExpr, bindForType );
verifyComparable( bindForType, lhsExpr, true, true );
BindingExpression unitBindingExpr = new BindingExpression( lhsExpr, unitExpr, bindForType, resultType, mark, false );
pushExpression( unitBindingExpr );
int iOffset = priorToken.getTokenStart();
int iLineNum = priorToken.getLine();
int iColumn = priorToken.getTokenColumn();
setLocation( iOffset, iLineNum, iColumn, true );
return true;
}
backtrack( mark, locationsCount, lhsExpr );
}
}
backtrack( mark, locationsCount, unitExpr );
return false;
}
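// Validates any connector operator found between the bound expressions against the
// binder-separators annotation (GosuTypes.BINDER_SEPARATORS()) on the matching
// prefixBind/postfixBind method: with no connector, the binding is valid only if the annotation
// requires none; with a connector, it must appear in the annotation's "required" or "accepted" lists.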
private boolean isConnectorStringValid( boolean bPostfix, String connectorString, Expression unitExpr, IType unitBinderType )
{
boolean bValid = connectorString == null || connectorString.isEmpty();
IMethodInfo bindMethod = unitExprType( unitExpr ).getTypeInfo().getCallableMethod( bPostfix ? "postfixBind" : "prefixBind", unitBinderType.getTypeParameters()[0] );
if( bindMethod != null )
{
IAnnotationInfo anno = bindMethod.getAnnotation( GosuTypes.BINDER_SEPARATORS() );
if( anno != null )
{
String[] required;
try
{
required = (String[])anno.getFieldValue( "required" );
}
catch( Exception parseException )
{
required = null;
}
if( connectorString == null || connectorString.isEmpty() )
{
// No connector string provided, valid if one isn't required
bValid = required == null || required.length == 0;
}
else
{
// A connector string is provided
if( required != null && required.length > 0 && Arrays.asList( required ).contains( connectorString ) )
{
// Valid only if a required connector string
bValid = true;
}
else
{
// Valid only if an accepted connector string
String[] accepted;
try
{
accepted = (String[])anno.getFieldValue( "accepted" );
}
catch( Exception parseException )
{
accepted = null;
}
bValid = accepted != null && accepted.length > 0 && Arrays.asList( accepted ).contains( connectorString );
}
}
}
}
return bValid;
}
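// Parses the unit part of a postfix binding, e.g. the "mi/hr" in "90 mi/hr": a unit factor
// optionally combined with "*" or "/", where "*" is only accepted if the accumulated unit type
// is structurally assignable to gw.lang.IMultiply and "/" only if it is assignable to
// gw.lang.IDivide; otherwise parsing stops and the factor parsed so far is kept.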
private boolean parseUnitExpression()
{
int iOffset = _tokenizer.getTokenStart();
int iLineNum = _tokenizer.getLineNumber();
int iColumn = getTokenizer().getTokenColumn();
int mark = getTokenizer().mark();
int locationsCount = _locations.size();
Token token = getTokenizer().getCurrentToken();
if( !parseUnitExpressionFactor( token ) )
{
return false;
}
Expression unitExpr = peekExpression();
IType unitBinderGenType = GosuTypes.IPOSTFIX_BINDER();
if( !StandardCoercionManager.isStructurallyAssignable( unitBinderGenType, unitExprType( unitExpr ) ) )
{
backtrack( mark, locationsCount );
return false;
}
do
{
mark = getTokenizer().mark();
locationsCount = _locations.size();
token = getTokenizer().getCurrentToken();
String value = token.getStringValue();
if( token.getType() == SourceCodeTokenizer.TT_OPERATOR && value != null &&
(value.equals( "*" ) ||
value.equals( "/" )) )
{
getTokenizer().nextToken();
token = getTokenizer().getCurrentToken();
IType multiplyType = TypeSystem.getByFullName( "gw.lang.IMultiply" );
boolean bMultiply = StandardCoercionManager.isStructurallyAssignable( multiplyType, unitExprType( unitExpr ) );
IType divideType = TypeSystem.getByFullName( "gw.lang.IDivide" );
boolean bDivide = StandardCoercionManager.isStructurallyAssignable( divideType, unitExprType( unitExpr ) );
if( !(value.equals( "*" ) && bMultiply) &&
!(value.equals( "/" ) && bDivide) )
{
getTokenizer().restoreToMark( mark );
return true;
}
if( !parseUnitExpressionFactor( token ) )
{
getTokenizer().restoreToMark( mark );
return true;
}
Expression rhs = popExpression();
//IType lhsType = peekExpression().getType();
//unitBinderGenType = unitBinderGenType.getParameterizedType( lhsType, JavaTypes.OBJECT() );
if( !StandardCoercionManager.isStructurallyAssignable( unitBinderGenType, rhs.getType() ) )
{
backtrack( mark, locationsCount, rhs );
return true;
}
Expression lhs = popExpression();
MultiplicativeExpression e = new MultiplicativeExpression();
e.setLHS( lhs );
e.setRHS( rhs );
e.setOperator( value );
IType type = resolveTypeForArithmeticExpression( e, lhs.getType(), value, rhs.getType() );
e.setType( type );
pushExpression( e );
setLocation( iOffset, iLineNum, iColumn );
unitExpr = e;
}
else
{
break;
}
} while( true );
return true;
}
private boolean parseUnitExpressionFactor( Token token )
{
int iOffset = _tokenizer.getTokenStart();
int iLineNum = _tokenizer.getLineNumber();
int iColumn = getTokenizer().getTokenColumn();
boolean bRes;
if( '(' == token.getType() )
{
int mark = getTokenizer().mark();
getTokenizer().nextToken();
if( parseUnitExpression() )
{
Expression e = popExpression();
ParenthesizedExpression expr = new ParenthesizedExpression( e );
pushExpression( expr );
verify( e, match( null, ')' ), Res.MSG_EXPECTING_EXPRESSION_CLOSE );
bRes = true;
}
else
{
getTokenizer().restoreToMark( mark );
bRes = false;
}
}
else
{
bRes = parseNameOrMethodCall( token );
}
if( bRes )
{
setLocation( iOffset, iLineNum, iColumn );
parseBindingExpression( token );
}
return bRes;
}
private static boolean hasMethod( IType type, String name )
{
if( type.isPrimitive() )
{
return false;
}
ITypeInfo ti = type.getTypeInfo();
if( ti == null )
{
return false;
}
return !ti.getMethods().getMethods( name ).isEmpty();
}
private boolean parseBooleanLiteral( Token token )
{
if( Keyword.KW_true == token.getKeyword() )
{
getTokenizer().nextToken();
BooleanLiteral e = new BooleanLiteral( true );
pushExpression( e );
return true;
}
if( Keyword.KW_false == token.getKeyword() )
{
getTokenizer().nextToken();
BooleanLiteral e = new BooleanLiteral( false );
pushExpression( e );
return true;
}
return false;
}
private boolean parseNullLiteral( Token token )
{
if( Keyword.KW_null == token.getKeyword() )
{
getTokenizer().nextToken();
pushExpression( new NullExpression() );
return true;
}
return false;
}
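// Stand-alone data structure initialization parses a bare "{ ... }" with no preceding "new".
// Illustrative Gosu source (not from this file):
//   {}                     -- with no usable context type, an empty ArrayList<Object>
//   {1, 2, 3}              -- a collection initializer; the element type is the LUB of the values
//   {"a" -> 1, "b" -> 2}   -- a map initializer (HashMap); key/value types are the LUBs of keys/values
// When an initializable context type is available (an array, or a collection/map type with a
// default constructor), the initializer is parsed against that type instead.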
private boolean parseStandAloneDataStructureInitialization( Token token )
{
return parseStandAloneDataStructureInitialization( token, false, false );
}
private boolean parseStandAloneDataStructureInitialization( Token token, boolean bAvoidContextType, boolean bBacktracking )
{
int mark = getTokenizer().mark();
int iLocationsCount = _locations.size();
Token startToken = token;
// inferred data constructors
if( '{' != token.getType() )
{
return false;
}
else
{
getTokenizer().nextToken();
token = getTokenizer().getCurrentToken();
bAvoidContextType = bAvoidContextType || shouldThisExpressionAvoidTheContextType();
IType ctxType = bAvoidContextType ? null : getInitializableType().getType();
NewExpression e = new InferredNewExpression();
boolean bPlaceholder = ctxType != null && ctxType.isDynamic();
if( ctxType == null || bPlaceholder )
{
IInitializerExpression initializer;
IType type;
if( '}' == token.getType() )
{
initializer = new CollectionInitializerExpression();
type = JavaTypes.ARRAY_LIST().getParameterizedType( bPlaceholder ? ctxType : JavaTypes.OBJECT() );
}
else
{
parseExpression( bPlaceholder ? new ContextType( ctxType, false ) : ContextType.OBJECT_FALSE );
Expression initialExpression = popExpression();
Expression actualInitExpr = initialExpression;
if( actualInitExpr instanceof ImplicitTypeAsExpression )
{
actualInitExpr = ((ImplicitTypeAsExpression)actualInitExpr).getLHS();
}
verify( actualInitExpr, actualInitExpr.getType() != JavaTypes.pVOID(), Res.MSG_VOID_NOT_ALLOWED );
if( match( null, ',' ) )
{
_parseInitializerExpression( new ContextType( JavaTypes.ARRAY_LIST().getParameterizedType( bPlaceholder ? ctxType : JavaTypes.OBJECT() ), false ) );
CollectionInitializerExpression collectionInit = (CollectionInitializerExpression)popExpression();
collectionInit.addFirst( initialExpression );
IType lub = bPlaceholder ? ctxType : TypeLord.findLeastUpperBound( getTypes( collectionInit.getValues() ) );
type = JavaTypes.ARRAY_LIST().getParameterizedType( lub );
initializer = collectionInit;
}
else if( match( null, "->", SourceCodeTokenizer.TT_OPERATOR ) )
{
parseExpression( bPlaceholder ? new ContextType( ctxType, false ) : ContextType.OBJECT_FALSE );
Expression initialValueExpression = popExpression();
MapInitializerExpression mapInitializer;
if( match( null, ',' ) )
{
parseMapInitializerList( new ContextType( JavaTypes.HASH_MAP().getParameterizedType( bPlaceholder ? ctxType : JavaTypes.OBJECT(),
bPlaceholder ? ctxType : JavaTypes.OBJECT() ), false ) );
mapInitializer = (MapInitializerExpression)popExpression();
}
else
{
mapInitializer = new MapInitializerExpression();
}
mapInitializer.addFirst( initialExpression, initialValueExpression );
IType keysLub = TypeLord.findLeastUpperBound( getTypes( mapInitializer.getKeys() ) );
IType valuesLub = TypeLord.findLeastUpperBound( getTypes( mapInitializer.getValues() ) );
type = JavaTypes.HASH_MAP().getParameterizedType( keysLub, valuesLub );
initializer = mapInitializer;
}
else
{
CollectionInitializerExpression collectionInit = new CollectionInitializerExpression();
collectionInit.addFirst( initialExpression );
IType lub = bPlaceholder ? ctxType : TypeLord.findLeastUpperBound( getTypes( collectionInit.getValues() ) );
type = JavaTypes.ARRAY_LIST().getParameterizedType( lub );
initializer = collectionInit;
}
}
verify( e, match( null, '}' ), Res.MSG_EXPECTING_CLOSE_BRACE_FOR_INITIALIZER );
pushExpression( (Expression)initializer );
setLocation( startToken.getTokenStart(), startToken.getLine(), startToken.getTokenColumn(), true );
popExpression();
e.setType( type );
e.setConstructor( getImplicitConstructor(type) );
((Expression)initializer).setType( type );
e.setInitializer( initializer );
}
else
{
e.setType( ctxType );
if( !match( null, '}' ) )
{
ContextType typeToInit = getCurrentInitializableContextType();
if( ctxType.isArray() )
{
IType ctxComponentType = ctxType.getComponentType();
List<Expression> valueExpressions = parseArrayValueList( ctxComponentType );
e.setValueExpressions( valueExpressions );
if( !typeToInit.isMethodScoring() )
{
ArrayList<IType> types = new ArrayList<>();
for (Object valueExpression : valueExpressions) {
types.add(((Expression) valueExpression).getType());
}
IType componentLeastUpperBound = TypeLord.findLeastUpperBound( types );
if( componentLeastUpperBound != GosuParserTypes.NULL_TYPE() )
{
//## todo: consider finding a way to preserve the compound type so that it can be coerced to a compatible array type
// if( componentLeastUpperBound instanceof CompoundType )
// e.setType( JavaTypes.ARRAY_LIST().getParameterizedType( componentLeastUpperBound ) );
if( componentLeastUpperBound instanceof CompoundType )
{
// Rather than rejecting compound-type arrays outright (the Java runtime has no way to represent them),
// we grab one component of the compound type and use that (better than falling back to Object[])
for( IType comp: componentLeastUpperBound.getCompoundTypeComponents() )
{
if( ctxComponentType.isAssignableFrom( comp ) )
{
componentLeastUpperBound = comp;
if( !comp.isInterface() )
{
if( ctxComponentType.isInterface() && comp instanceof IMetaType )
{
componentLeastUpperBound = ctxComponentType;
}
// Favor class type over interface type
break;
}
}
}
}
if( !(componentLeastUpperBound instanceof CompoundType) )
{
e.setType( componentLeastUpperBound.getArrayType());
}
}
}
if( !verify( e, match( null, '}' ), Res.MSG_EXPECTING_CLOSE_BRACE_FOR_INITIALIZER ) )
{
if( !verify( e, !match( null, "->", SourceCodeTokenizer.TT_OPERATOR ), Res.MSG_UNEXPECTED_ARROW ) )
{
e.setType( ErrorType.getInstance() );
}
}
}
else
{
_parseInitializerExpression( new ContextType( e.getType(), false ) );
IInitializerExpression initializerExpression = (IInitializerExpression)popExpression();
e.setInitializer( initializerExpression );
e.setConstructor( ctxType.getTypeInfo().getConstructor() );
if( !typeToInit.isMethodScoring() )
{
IType initializerCtxType = getCurrentInitializableContextType().getType();
if( (initializerCtxType.equals( JavaTypes.MAP() ) ||
initializerCtxType.equals( JavaTypes.HASH_MAP() )) &&
initializerExpression instanceof MapInitializerExpression )
{
MapInitializerExpression mapInitializer = (MapInitializerExpression)initializerExpression;
IType keysLub = TypeLord.findLeastUpperBound( getTypes( mapInitializer.getKeys() ) );
IType valuesLub = TypeLord.findLeastUpperBound( getTypes( mapInitializer.getValues() ) );
if( keysLub != GosuParserTypes.NULL_TYPE() && valuesLub != GosuParserTypes.NULL_TYPE() )
{
e.setType( e.getType().getGenericType().getParameterizedType( keysLub, valuesLub ) );
}
}
else
{
if( ( JavaTypes.COLLECTION().equals( initializerCtxType.getGenericType() ) ||
JavaTypes.LIST().equals( initializerCtxType.getGenericType() ) ||
JavaTypes.ARRAY_LIST().equals( initializerCtxType.getGenericType() ) ||
JavaTypes.LINKED_LIST().equals( initializerCtxType.getGenericType() ) ||
JavaTypes.SET().equals( initializerCtxType.getGenericType() ) ||
JavaTypes.HASH_SET().equals( initializerCtxType.getGenericType() ) ||
JavaTypes.ITERABLE().equals( initializerCtxType.getGenericType() ))
&& initializerExpression instanceof CollectionInitializerExpression )
{
CollectionInitializerExpression collectionInitializerExpression = (CollectionInitializerExpression)initializerExpression;
IType valuesLub = TypeLord.findLeastUpperBound( getTypes( collectionInitializerExpression.getValues() ) );
if( valuesLub != GosuParserTypes.NULL_TYPE() )
{
e.setType( e.getType().getGenericType().getParameterizedType( valuesLub ) );
}
}
}
}
if( !verify( e, match( null, '}' ), Res.MSG_EXPECTING_CLOSE_BRACE_FOR_INITIALIZER ) )
{
if( !verify( e, !match( null, "->", SourceCodeTokenizer.TT_OPERATOR ), Res.MSG_UNEXPECTED_ARROW ) )
{
e.setType( ErrorType.getInstance() );
}
}
pushExpression( (Expression)initializerExpression );
setLocation( startToken.getTokenStart(), startToken.getLine(), startToken.getTokenColumn(), true );
popExpression();
if( !bBacktracking && initializerExpression.hasParseExceptions() )
{
return maybeReparseWithoutContextType( mark, iLocationsCount, (Expression)initializerExpression );
}
}
}
else
{
e.setConstructor( ctxType.getTypeInfo().getConstructor() );
}
}
IType componentType = e.getType().isArray() ? e.getType().getComponentType() : e.getType();
if( !(componentType instanceof IJavaType) )
{
verifyTypeVarAreReified( e, componentType );
}
pushExpression( e );
return true;
}
}
private boolean maybeReparseWithoutContextType( int mark, int iLocationsCount, Expression initializerExpression )
{
backtrack( mark, iLocationsCount, initializerExpression );
Token token = getTokenizer().getCurrentToken();
boolean bRes = parseStandAloneDataStructureInitialization( token, true, true );
if( peekExpression().hasParseExceptions() )
{
backtrack( mark, iLocationsCount, initializerExpression );
return parseStandAloneDataStructureInitialization( token, false, true );
}
else
{
return bRes;
}
}
private boolean shouldThisExpressionAvoidTheContextType() {
int mark = getTokenizer().mark();
eatBlock( '{', '}', false );
Token token = getTokenizer().getCurrentToken();
String value = token.getStringValue();
boolean bAvoidContextType =
'.' == token.getType() ||
'[' == token.getType() ||
(token.getType() == SourceCodeTokenizer.TT_OPERATOR &&
("?.".equals( value ) ||
"*.".equals( value ) ||
"==".equals( value ) ||
"!=".equals( value ) ||
"===".equals( value ) ||
"!==".equals( value ) ||
"#".equals( value ) ||
"?".equals( value ) ||
"?[".equals( value )));
getTokenizer().restoreToMark( mark );
return bAvoidContextType;
}
private List<IType> getTypes( List<? extends IExpression> list )
{
if( list == null )
{
return Collections.emptyList();
}
ArrayList<IType> returnList = new ArrayList<>( list.size() );
for( int i = 0; i < list.size(); i++ )
{
IExpression expression = list.get( i );
if( !(expression instanceof NullExpression) || (i == list.size()-1 && returnList.isEmpty()) ) // don't include NullExpression's Object type in the presence of non-'null' expressions, 'null' assumes the type of the LUB of the other types
{
returnList.add( expression.getType() );
}
}
return returnList;
}
private ContextType getInitializableType()
{
ContextType typeToInit = getCurrentInitializableContextType();
if( typeToInit.getType() == null )
{
return typeToInit;
}
if( typeToInit.getType().isInterface() )
{
typeToInit = new ContextType( findImpl( typeToInit.getType() ), typeToInit.isMethodScoring() );
}
if( typeToInit.getType() != null &&
(typeToInit.getType().isArray() ||
typeToInit.getType().getTypeInfo().getConstructor() != null) )
{
return typeToInit; // An array or collection type with a default constructor
}
return ContextType.EMPTY;
}
private ContextType getCurrentInitializableContextType()
{
ContextType ctxType = getContextType();
return supportsInitializer( ctxType.getType() ) ? ctxType : ContextType.EMPTY;
}
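// Maps an interface context type to a concrete default implementation so an initializer can be
// constructed: List/Collection/Iterable -> ArrayList, Set -> HashSet, Map -> HashMap, preserving
// any type parameters; returns null for anything else.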
public static IType findImpl( IType typeToInit )
{
IType genericType;
if (typeToInit.isParameterizedType()) {
genericType = typeToInit.getGenericType();
} else {
genericType = typeToInit;
}
if( genericType.equals( JavaTypes.LIST() ) ||
genericType.equals( JavaTypes.COLLECTION() ) ||
genericType.equals( JavaTypes.ITERABLE() ))
{
IJavaType arrayListType = JavaTypes.ARRAY_LIST();
if( typeToInit.isParameterizedType() )
{
arrayListType = (IJavaType)arrayListType.getParameterizedType( typeToInit.getTypeParameters() );
}
return arrayListType;
}
else if( genericType.equals( JavaTypes.SET() ) )
{
IJavaType arrayListType = JavaTypes.HASH_SET();
if( typeToInit.isParameterizedType() )
{
arrayListType = (IJavaType)arrayListType.getParameterizedType( typeToInit.getTypeParameters() );
}
return arrayListType;
}
else if( genericType.equals( JavaTypes.MAP() ) )
{
IJavaType hashMapType = JavaTypes.HASH_MAP();
if( typeToInit.isParameterizedType() )
{
hashMapType = (IJavaType)hashMapType.getParameterizedType( typeToInit.getTypeParameters() );
}
return hashMapType;
}
else
{
return null;
}
}
private boolean supportsInitializer( IType type )
{
if( type == null )
{
return false;
}
return type.isArray() ||
JavaTypes.MAP().isAssignableFrom( type ) ||
JavaTypes.SET().isAssignableFrom( type ) ||
JavaTypes.LIST().isAssignableFrom( type ) ||
JavaTypes.COLLECTION().equals( type.getGenericType() ) ||
JavaTypes.ITERABLE().equals( type.getGenericType() ) ||
type.isDynamic();
}
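// Parses a Gosu block (closure) expression of the form "\ args -> body", where the body is either
// a single expression or a statement block. Illustrative Gosu source (not from this file; the
// exact parameter syntax shown is an assumption):
//   \ x -> x * 2
//   \ x : int -> { return x * 2 }
// Parameter types may be inferred from the context's functional interface, the argument count is
// limited to IBlock.MAX_ARGS, and naked assignments or statements in an expression body are
// reported with dedicated error messages.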
private void parseBlockExpression()
{
ISymbolTable symTable = getSymbolTable();
boolean pushed = false;
int originaliBreakOk = _iBreakOk;
_iBreakOk = 0;
int originaliContinueOk = _iContinueOk;
_iContinueOk = 0;
_iReturnOk++;
try
{
ContextType contextType = getContextType();
IType[] unbound = {null};
IType expectedBlockReturnType = inferReturnTypeForBlockArgument( contextType, unbound );
_blockReturnTypeStack.push( expectedBlockReturnType );
pushed = true;
boolean foundArrow;
BlockExpression block = new BlockExpression();
StandardScope blockScope = new StandardScope();
symTable.pushScope( blockScope );
try
{
block.setScope( blockScope );
//If there are arguments, parse them
if( !match( null, "->", SourceCodeTokenizer.TT_OPERATOR ) )
{
//Infer the parameter types of the block
List<IType> inferredContextTypes = getContextTypesForBlockArgument( contextType );
IType type = contextType.getType();
ArrayList<ISymbol> args = parseParameterDeclarationList( block, false, inferredContextTypes, false, false, false, type != null && type.isDynamic() );
args.forEach( _symTable::putSymbol );
foundArrow = verify( block, match( null, "->", SourceCodeTokenizer.TT_OPERATOR ), Res.MSG_EXPECTING_ARROW_AFTER_BLOCK_ARGS );
block.setArgs( args );
}
else
{
foundArrow = true;
block.setArgs( Collections.<ISymbol>emptyList() );
}
verify( block, block.getArgs().size() <= IBlock.MAX_ARGS, Res.MSG_BLOCKS_CAN_HAVE_A_MOST_SIXTEEN_ARGS );
pushCurrentBlock( block );
try
{
//parse the expression body
if(foundArrow) {
int iOffset = _tokenizer.getTokenStart();
int iLineNum = _tokenizer.getLineNumber();
int iColumn = getTokenizer().getTokenColumn();
if( match( null, '{' ) )
{
_ctxInferenceMgr.pushLoopCompromised();
pushInferredContextTypes( ContextType.EMPTY );
try
{
parseStatementBlock();
}
finally
{
popInferredContextTypes();
_ctxInferenceMgr.popLoopCompromised();
}
if( peekStatement() instanceof StatementList )
{
setLocation( iOffset, iLineNum, iColumn );
}
Statement blockBody = popStatement();
if( expectedBlockReturnType != null && expectedBlockReturnType != GosuParserTypes.NULL_TYPE() )
{
boolean[] bAbsolute = {false};
ITerminalStatement term = blockBody.getLeastSignificantTerminalStatement( bAbsolute );
Statement verifyStmt;
if (blockBody instanceof StatementList &&
((StatementList) blockBody).getStatements() != null &&
((StatementList) blockBody).getStatements().length > 0)
{
StatementList lst = (StatementList)blockBody;
verifyStmt = lst.getStatements()[lst.getStatements().length - 1];
}
else
{
verifyStmt = blockBody;
}
verify( verifyStmt, term != null && (bAbsolute[0] || term.getTerminalType() == TerminalType.ForeverLoop), Res.MSG_MISSING_RETURN );
}
block.setBody( blockBody );
}
else
{
int tokenizerPosition = getTokenizer().getTokenStart();
parseExpression( expectedBlockReturnType == null ? ContextType.EMPTY : new ContextType( expectedBlockReturnType, unbound[0], false ) );
Expression exprBody = popExpression();
// void functions can work in the body of a block
//noinspection ThrowableResultOfMethodCallIgnored
exprBody.removeParseException( Res.MSG_VOID_EXPRESSION_NOT_ALLOWED );
//If someone is trying to do a naked assignment, parse it and give a good error message
if( matchAssignmentOperator() != null )
{
parseExpression();
Expression assignmentBody = popExpression();
verify( assignmentBody, false, Res.MSG_ASSIGNMENTS_MUST_BE_ENCLOSED_IN_CURLIES_IN_BLOCKS );
}
else if( tokenizerPosition == getTokenizer().getTokenStart() )
{
//If someone is trying to do a naked return, parse the expression and give a good error message
int mark = getTokenizer().mark();
if( match( null, Keyword.KW_return ) )
{
String strToken = getTokenizer().getTokenAt( mark ).getStringValue();
parseExpression();
Expression returnBody = popExpression();
addError( returnBody, Res.MSG_STATEMENTS_MUST_BE_ENCLOSED_IN_CURLIES_IN_BLOCKS, strToken );
}
else if( match( null, Keyword.KW_var ) ||
match( null, Keyword.KW_switch ) ||
match( null, Keyword.KW_if ) )
{
String strToken = getTokenizer().getTokenAt( mark ).getStringValue();
addError( exprBody, Res.MSG_STATEMENTS_MUST_BE_ENCLOSED_IN_CURLIES_IN_BLOCKS, strToken );
}
}
// expressions with return values are acceptable when a block is going to discard the value
if( JavaTypes.pVOID().equals( expectedBlockReturnType ) )
{
//noinspection ThrowableResultOfMethodCallIgnored
exprBody.removeParseException( Res.MSG_TYPE_MISMATCH );
}
block.setBody( exprBody );
}
}
block.setBlockReturnType( getBlockReturnType( block.getBody(), expectedBlockReturnType ) );
block.setScope( null );
block.updateGosuClass();
pushExpression( block );
}
finally
{
popCurrentBlock();
}
}
finally
{
symTable.popScope();
}
}
finally
{
_iBreakOk = originaliBreakOk;
_iContinueOk = originaliContinueOk;
_iReturnOk--;
if( pushed ) {
_blockReturnTypeStack.pop();
}
}
}
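// Determines the block's return type from its body: for an expression body it is simply the
// expression's type; for a statement body it is the least upper bound of the types of all
// contained return statements (ignoring nested block expressions), or the context type when the
// body contains no return statements (e.g. it only throws); a pVOID context type passes through.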
private IType getBlockReturnType( IParsedElement blockBody, IType ctxType )
{
if( blockBody == null )
{
return ErrorType.getInstance();
}
if( ctxType == JavaTypes.pVOID() )
{
return ctxType;
}
if( blockBody instanceof Expression )
{
return ((Expression)blockBody).getType();
}
else
{
Statement stmt = (Statement) blockBody;
ArrayList<ReturnStatement> returnStatements = new ArrayList<>();
ArrayList<IType> returnTypes = new ArrayList<>();
//noinspection unchecked
stmt.getContainedParsedElementsByTypesWithIgnoreSet( (List) returnStatements,
new HashSet( Arrays.asList( BlockExpression.class ) ),
ReturnStatement.class );
// If there are no return statements, the body must consist solely of throw statements,
// so the return type is just whatever the context type is
if( returnStatements.size() == 0 )
{
return ctxType;
}
else
{
for( ReturnStatement returnStmt : returnStatements )
{
returnTypes.add( returnStmt.getValue().getType() );
}
return TypeLord.findLeastUpperBound( returnTypes );
}
}
}
private List<IType> getContextTypesForBlockArgument( ContextType ctxType )
{
if( ctxType == null )
{
return null;
}
IType type = ctxType.getType();
if( type == null )
{
return null;
}
IType alternateType = ctxType.getAlternateType();
if( type instanceof FunctionType )
{
if( alternateType instanceof FunctionType )
{
// Alternate type includes type vars so that untyped parameters in the block can potentially be inferred *after* the block expression parses
type = alternateType;
}
else if( alternateType != null )
{
type = alternateType.getFunctionalInterface();
}
return Arrays.asList( ((FunctionType)type).getParameterTypes() );
}
else
{
IFunctionType functionType = type.getFunctionalInterface();
if( functionType != null )
{
if( alternateType instanceof FunctionType )
{
// Alternate type includes type vars so that untyped parameters in the block can potentially be inferred *after* the block expression parses
functionType = (IFunctionType)alternateType;
}
else if( alternateType != null )
{
functionType = alternateType.getFunctionalInterface();
}
return Arrays.asList( functionType.getParameterTypes() );
}
}
return null;
}
private IType inferReturnTypeForBlockArgument( ContextType contextType, IType[] unbound )
{
if( contextType.isMethodScoring() )
{
return null;
}
IType ctxType = contextType.getType();
if( ctxType == null )
{
return null;
}
IType returnType = null;
IType alternateType = contextType.getAlternateType();
if( ctxType instanceof FunctionType )
{
if( alternateType instanceof FunctionType )
{
// Alternate type includes type vars so that untyped parameters in the block can potentially be inferred *after* the block expression parses
ctxType = contextType.getAlternateType();
}
else if( alternateType != null )
{
ctxType = alternateType.getFunctionalInterface();
}
returnType = ((FunctionType)ctxType).getReturnType();
}
else
{
IFunctionType functionType = ctxType.getFunctionalInterface();
if( functionType != null )
{
if( alternateType instanceof FunctionType )
{
// Alternate type includes type vars so that untyped parameters in the block can potentially be inferred *after* the block expression parses
functionType = (IFunctionType)alternateType;
}
else if( alternateType != null )
{
functionType = alternateType.getFunctionalInterface();
}
returnType = functionType.getReturnType();
}
}
IFunctionType functionType = ctxType.getFunctionalInterface();
if( functionType != null )
{
IType iType = functionType.getReturnType();
if( returnType == null )
{
returnType = iType;
}
else if( !returnType.equals( iType ) )
{
return null;
}
}
// If we are currently inferring the return type, use the bounding type to parse
// on the way in so that we get the actual type on the way out to infer with
IType result = TypeLord.boundTypes( returnType, getCurrentlyInferringFunctionTypeVars() );
unbound[0] = result != returnType ? returnType : null;
return result;
}
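// Illustrative new-expression forms handled below (Gosu source, not from this file; type and
// member names are placeholders):
//   new StringBuilder( "abc" )          -- ordinary constructor call
//   new()                               -- typeless "new"; the constructed type is inferred from the context type
//   new Foo() { :Bar = 1 }              -- object initializer (a "{" followed by ":" after the ctor args)
//   new int[10]  /  new int[]{1, 2, 3}  -- array construction by size or by array-value list
//   new Runnable() { /* members */ }    -- anonymous inner class on an interface or class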
/**
* <i>new-expression</i>
* <b>new</b> <type-expression> <b>(</b> [<argument-list>] <b>)</b> <b>{</b> [<initialization-expression>] <b>}</b>
* <b>new</b> <type-expression> <b>[</b> <expression> <b>]</b>
* <b>new</b> <type-expression> <b>[</b><b>]</b> <b>{</b> [<array-value-list>] <b>}</b>
*/
void parseNewExpression()
{
parseNewExpressionOrAnnotation( false );
}
void parseNewExpressionOrAnnotation( boolean bAnnotation )
{
_parseNewExpressionOrAnnotation( bAnnotation, false );
}
void _parseNewExpressionOrAnnotation( boolean bAnnotation, boolean bBacktracking )
{
boolean original = isParsingAnnotation();
setParsingAnnotation( bAnnotation );
try
{
int mark = getTokenizer().mark();
int iLocationsCount = _locations.size();
TypeLiteral typeLiteral = null;
if( match( null, null, '(', true ) && isParenthesisTerminalExpression( true ) )
{
// handle typeless 'new()' syntax
typeLiteral = maybeInferTypeLiteralFromContextType();
}
if( typeLiteral == null )
{
parseTypeLiteralForNewExpression();
typeLiteral = (TypeLiteral)popExpression();
IType type = typeLiteral.getType().getType();
if( !bBacktracking )
{
if( type.isParameterizedType() && TypeLord.deriveParameterizedTypeFromContext( type.getGenericType(), null ) == type )
{
// Try to infer the constructed type from args rather than assume the default type
typeLiteral.setType( MetaType.getLiteral( type.getGenericType() ) );
}
}
else if( type == TypeLord.getPureGenericType( type ) )
{
// Ensure we never construct a raw generic type
typeLiteral.setType( MetaType.getLiteral( TypeLord.deriveParameterizedTypeFromContext( type, null ) ) );
}
}
verify( typeLiteral, !(typeLiteral instanceof BlockLiteral), Res.MSG_BLOCKS_LITERAL_NOT_ALLOWED_IN_NEW_EXPR );
IType declaringClass = typeLiteral.getType().getType();
if( isParsingStaticFeature() )
{
IType type = typeLiteral.getType().getType();
while( type.getEnclosingType() != null )
{
if( type instanceof IGosuClass && verify( typeLiteral, ((IGosuClass)type).isStatic(), Res.MSG_CANNOT_INSTANTIATE_NON_STATIC_CLASSES_HERE ) )
{
type = type.getEnclosingType();
}
else
{
break;
}
}
}
verify( typeLiteral, !declaringClass.isEnum() || getTokenizer().getCurrentToken().getType() == '[', Res.MSG_ENUM_CONSTRUCTOR_NOT_ACCESSIBLE );
parseNewExpressionOrAnnotation( declaringClass, bAnnotation, false, typeLiteral, mark );
if( !bBacktracking && typeLiteral.getType().getType().isGenericType() && !typeLiteral.getType().getType().isParameterizedType() )
{
// We didn't infer type parameters from the ctor's parameter types using the raw generic type, backtrack and just use the default generic type
backtrack( mark, iLocationsCount );
_parseNewExpressionOrAnnotation( bAnnotation, true );
}
}
finally
{
setParsingAnnotation( original );
}
}
private TypeLiteral maybeInferTypeLiteralFromContextType()
{
TypeLiteral typeLiteral = null;
IType ctxType = getContextType().getType();
if( ctxType != null && !getContextType().isMethodScoring() )
{
typeLiteral = new InferredTypeLiteral( ctxType );
pushExpression( typeLiteral );
int iOffset = getTokenizer().getTokenStart();
int iLineNum = getTokenizer().getLineNumber();
int iColumn = getTokenizer().getTokenColumn();
setLocation( iOffset, iLineNum, iColumn, true );
popExpression();
}
return typeLiteral;
}
void parseNewExpressionOrAnnotation( IType declaringClass, boolean bAnnotation, boolean bNoArgNoParenthesis, final TypeLiteral typeLiteral, int mark )
{
int iParenStart = _tokenizer.getTokenStart();
NewExpression e = bAnnotation ? new AnnotationExpression() : new NewExpression();
e.setType( declaringClass );
e.setTypeLiteral(typeLiteral);
verifyCanConstructInnerClassFromCallSite( e, declaringClass );
if( bNoArgNoParenthesis || match( null, '(' ) )
{
boolean bAnonymous = false;
boolean bAssumeClosingParenMatch = true;
e.setArgPosition( iParenStart + 1 );
List<IConstructorType> listConstructorTypes = ((declaringClass instanceof ITypeVariableType || !declaringClass.isInterface()) || isAnnotation( declaringClass ))
? getPreliminaryConstructorTypes( declaringClass, e )
: Collections.<IConstructorType>emptyList();
scrubAnnotationConstructors(declaringClass, listConstructorTypes);
boolean bNoArgsProvided = false;
if( !bNoArgNoParenthesis &&
(!(bNoArgsProvided = match( null, null, ')', true )) ||
listConstructorTypes.size() > 0 && listConstructorTypes.get( 0 ).hasOptionalParams()) )
{
MethodScore bestConst = parseArgumentList( declaringClass, e, listConstructorTypes, null, !(declaringClass instanceof ErrorType), bNoArgsProvided );
IConstructorType constructorType = null;
if( bestConst.isValid() )
{
declaringClass = maybeChangeToInferredType( declaringClass, typeLiteral, bestConst );
constructorType = (IConstructorType)(bestConst.getInferredFunctionType() == null ? bestConst.getRawFunctionType() : bestConst.getInferredFunctionType());
List<IExpression> args = bestConst.getArguments();
verifyArgCount( e, args.size(), constructorType );
//noinspection SuspiciousToArrayCall
e.setArgs( args.toArray( new Expression[args.size()] ) );
e.setType( declaringClass );
e.setConstructor( constructorType.getConstructor() );
IType[] argTypes = constructorType.getParameterTypes();
e.setArgTypes( argTypes );
e.setNamedArgOrder( bestConst.getNamedArgOrder() );
}
else
{
verify( e, false, Res.MSG_NO_CONSTRUCTOR_FOUND_FOR_CLASS, declaringClass.getName() );
e.setType( ErrorType.getInstance() );
}
bAssumeClosingParenMatch = verify( e, match( null, ')' ), Res.MSG_EXPECTING_FUNCTION_CLOSE );
bAnonymous = !isInitializableType( e.getType() ) && match( null, null, '{', true );
verifyConstructorIsAccessible( declaringClass, e, constructorType, bAnonymous );
}
else
{
if( bNoArgsProvided )
{
match( null, ')' );
}
try
{
IConstructorType constructorType = getConstructorType( declaringClass, new Expression[0], null, this );
e.setType( declaringClass );
e.setConstructor( constructorType.getConstructor() );
bAnonymous = !isInitializableType( e.getType() ) && match( null, null, '{', true );
verifyConstructorIsAccessible( declaringClass, e, constructorType, bAnonymous );
}
catch( ParseException pe )
{
boolean possibleAnonymousClassDecl = declaringClass.isInterface() && !isInitializableType( declaringClass );
boolean possibleDataStructDecl = isConcreteInitializableType( declaringClass );
if( (possibleAnonymousClassDecl || possibleDataStructDecl) && match( null, null, '{', true ) )
{
// Assume we are constructing an anonymous class on an interface or a data structure
bAnonymous = true;
}
else
{
e.setType( declaringClass );
IConstructorInfo firstCtor = getConstructor( declaringClass );
e.setConstructor( firstCtor );
e.addParseException( pe );
}
}
}
if( typeLiteral != null && (!(typeLiteral.getType().getType() instanceof IJavaType) || bAnonymous) )
{
verifyTypeVarAreReified( typeLiteral, typeLiteral.getType().getType() );
}
if( match( null, null, '{', true ) )
{
if( isInitializableType( e.getType() ) )
{
Token startToken = getTokenizer().getCurrentToken();
match( null, '{' );
if( !match( null, '}' ) )
{
_parseInitializerExpression( new ContextType( e.getType() ) );
IInitializerExpression initializerExpression = (IInitializerExpression)peekExpression();
e.setInitializer( initializerExpression );
verify( e, match( null, '}' ), Res.MSG_EXPECTING_CLOSE_BRACE_FOR_INITIALIZER );
setLocation( startToken.getTokenStart(), startToken.getLine(), startToken.getTokenColumn() );
popExpression();
}
}
else if( !(declaringClass instanceof ErrorType) )
{
int state = _tokenizer.mark();
// look ahead two tokens ('{' then ':') to see if this is an object initializer
if( !declaringClass.isAbstract() && !declaringClass.isInterface() &&
match( null, '{' ) && match( null, ":", SourceCodeTokenizer.TT_OPERATOR ) )
{
_tokenizer.restoreToMark( state );
parseObjectInitializer( typeLiteral.evaluate() );
ObjectInitializerExpression expression = (ObjectInitializerExpression)popExpression();
e.setInitializer( expression );
}
else if( bAssumeClosingParenMatch )
{
// Assume we are constructing an anonymous class on a gosu or java type.
declaringClass = parseAnonymousInnerClass( declaringClass, typeLiteral, e, state, mark );
}
}
}
if( e.getConstructor() != null )
{
// Prevent abstract types from being constructed (annotation interfaces are ok though)
IType ownersType = e.getConstructor().getOwnersType();
boolean bAnnotationType = ownersType instanceof ICanBeAnnotation && ((ICanBeAnnotation)ownersType).isAnnotation() && JavaTypes.ANNOTATION().isAssignableFrom( ownersType );
verify( e, bAnnotationType || !ownersType.isAbstract() || declaringClass instanceof ITypeVariableType, Res.MSG_CANNOT_CONSTRUCT_ABSTRACT_CLASS, declaringClass.getName() );
// Prevent recursive types from being constructed directly
warn( e, declaringClass instanceof ITypeVariableType || !TypeLord.isRecursiveType( declaringClass ), Res.MSG_CANNOT_CONSTRUCT_RECURSIVE_CLASS, declaringClass.getName() );
}
pushExpression( e );
}
else if( !bAnnotation )
{
if( typeLiteral != null && !(typeLiteral.getType().getType() instanceof IJavaType) )
{
verifyTypeVarAreReified( typeLiteral, typeLiteral.getType().getType() );
}
if( verify( e, match( null, '[' ), Res.MSG_EXPECTING_NEW_ARRAY_OR_CTOR ) )
{
if( match( null, ']' ) )
{
int numArrays = 1;
while( match( null, '[' ) )
{
verify( e, match( null, ']' ), Res.MSG_EXPECTING_ARRAY_BRACKET );
++numArrays;
}
for( int i = 0; i < numArrays; i++ )
{
declaringClass = declaringClass.getArrayType();
}
verify( e, match( null, '{' ), Res.MSG_EXPECTING_OPEN_BRACE_FOR_NEW_ARRAY );
e.setType( declaringClass );
if( match( null, '}' ) )
{
e.setValueExpressions( null );
}
else
{
List valueExpressions = parseArrayValueList( declaringClass.getComponentType() );
verify( e, match( null, '}' ), Res.MSG_EXPECTING_CLOSE_BRACE_FOR_NEW_ARRAY );
//noinspection unchecked
e.setValueExpressions( valueExpressions );
}
}
else
{
parseExpression( ContextType.pINT_FALSE );
e.addSizeExpression( popExpression() );
IType arrayType = null;
boolean bSizelessDimension = false;
while( verify( e, match( null, ']' ), Res.MSG_EXPECTING_ARRAY_BRACKET ) )
{
declaringClass = declaringClass.getArrayType();
arrayType = declaringClass;
if( match( null, '[' ) )
{
if( !bSizelessDimension && !match( null, null, ']', true ) )
{
parseExpression( ContextType.pINT_FALSE );
e.addSizeExpression( popExpression() );
}
else
{
bSizelessDimension = true;
}
}
else
{
break;
}
}
if( arrayType != null )
{
declaringClass = arrayType;
}
else
{
declaringClass = ErrorType.getInstance();
}
e.setType( declaringClass );
}
}
pushExpression( e );
}
else
{
if( !(declaringClass instanceof ErrorType) )
{
ITypeInfo typeInfo = declaringClass.getTypeInfo();
if( typeInfo != null )
{
IConstructorInfo iConstructorInfo = typeInfo.getConstructor();
if (iConstructorInfo == null) {
outer:
for (IConstructorInfo constructorInfo : typeInfo.getConstructors()) {
if (constructorInfo instanceof IOptionalParamCapable) {
IExpression[] defaultVals = ((IOptionalParamCapable) constructorInfo).getDefaultValueExpressions();
for (IExpression defaultVal : defaultVals) {
if (defaultVal == null) {
continue outer;
}
}
iConstructorInfo = constructorInfo;
break;
}
}
}
if( verify( e, iConstructorInfo != null, Res.MSG_NO_DEFAULT_CTOR_IN, declaringClass.getName() ) )
{
e.setType( declaringClass );
e.setConstructor( iConstructorInfo );
e.setArgTypes();
e.setType( declaringClass );
}
else
{
e.setType( ErrorType.getInstance() );
}
}
else
{
e.setType( ErrorType.getInstance() );
}
}
else
{
e.setType( ErrorType.getInstance() );
}
pushExpression( e );
}
IConstructorInfo constructor = e.getConstructor();
if( (constructor != null) && (constructor.isDeprecated()) )
{
IParserState state;
if( typeLiteral == null )
{
state = null;
}
else
{
state = new IParserState()
{
public int getLineNumber()
{
return typeLiteral.getLocation().getLineNum();
}
public int getTokenColumn()
{
return typeLiteral.getLocation().getColumn();
}
public String getSource()
{
return getTokenizer().getSource();
}
public int getTokenStart()
{
return typeLiteral.getLocation().getOffset();
}
public int getTokenEnd()
{
return typeLiteral.getLocation().getExtent();
}
public int getLineOffset()
{
return getTokenizer().getLineOffset();
}
public IParserState cloneWithNewTokenStartAndTokenEnd( int newTokenStart, int newLength )
{
return null;
}
};
}
//noinspection ThrowableInstanceNeverThrown
e.addParseWarning( new ParseWarningForDeprecatedMember( state,
TypeInfoUtil.getConstructorSignature( constructor ),
constructor.getContainer().getName() ) );
}
}
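// If method scoring inferred a constructor whose declaring type differs from the declared one
// (e.g. inferred type arguments on a generic class), retarget the expression and its type literal
// to the inferred declaring type.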
private IType maybeChangeToInferredType( IType declaringClass, TypeLiteral typeLiteral, MethodScore bestConst )
{
ConstructorType inferredFunctionType = (ConstructorType)bestConst.getInferredFunctionType();
if( inferredFunctionType != null &&
!(declaringClass instanceof ITypeVariableType) &&
declaringClass != inferredFunctionType.getDeclaringType() )
{
declaringClass = inferredFunctionType.getDeclaringType();
typeLiteral.setType( declaringClass );
}
return declaringClass;
}
//## todo: remove this after removal of old-style Gosu "annotations" when there will only be a single ctor
private void scrubAnnotationConstructors(IType declaringClass, List<IConstructorType> listConstructorTypes) {
if( declaringClass instanceof IGosuClass ) {
// We only include one ctor for an annotation implemented in Gosu, so let it be called with or without named args
return;
}
if (JavaTypes.ANNOTATION().isAssignableFrom(declaringClass)) {
if (match(null, ":", ISourceCodeTokenizer.TT_OPERATOR, true)) {
// Arg[s] are named, so only include the one ctor with default params (this is the new/conventional way)
Iterator<IConstructorType> iter = listConstructorTypes.iterator();
while (iter.hasNext()) {
IConstructorType next = iter.next();
IConstructorInfo constructor = next.getConstructor();
if ( !(constructor instanceof ConstructorInfoBuilder.IBuilt &&
((ConstructorInfoBuilder.IBuilt)constructor).getUserData() == AnnotationConstructorGenerator.STANDARD_CTOR_WITH_DEFAULT_PARAM_VALUES) ) {
// Remove all but the one standard ctor
iter.remove();
}
}
} else {
// Arg[s] are not named, only include "legacy" constructors to support old-style ordered arg passing (this is effectively deprecated)
Iterator<IConstructorType> iter = listConstructorTypes.iterator();
while (iter.hasNext()) {
IConstructorType next = iter.next();
IConstructorInfo constructor = next.getConstructor();
if ( constructor instanceof ConstructorInfoBuilder.IBuilt &&
((ConstructorInfoBuilder.IBuilt)constructor).getUserData() == AnnotationConstructorGenerator.STANDARD_CTOR_WITH_DEFAULT_PARAM_VALUES ) {
// Remove the standard ctor
iter.remove();
break;
}
}
}
}
}
private boolean isAnnotation( IType type )
{
return JavaTypes.ANNOTATION().isAssignableFrom( type ) ||
JavaTypes.IANNOTATION().isAssignableFrom( type );
}
private ArrayList<IConstructorType> getPreliminaryConstructorTypes( IType declaringClass, NewExpression e )
{
// Get a preliminary list of constructor types to check arguments against. Note we do this to aid in error feedback and value popup completion.
ArrayList<IConstructorType> listConstructorTypes = new ArrayList<>( 2 );
try
{
getConstructorType( declaringClass, null, listConstructorTypes, this );
}
catch( ParseException pe )
{
IConstructorInfo firstCtor = getConstructor( declaringClass );
e.setConstructor( firstCtor );
e.addParseException( pe );
}
IType[][] argTypesArray = new IType[listConstructorTypes.size()][];
IParameterInfo[][] paramTypesPossible = new IParameterInfo[listConstructorTypes.size()][];
for( int i = 0; i < argTypesArray.length; i++ )
{
IConstructorType ctorType = listConstructorTypes.get( i );
paramTypesPossible[i] = ctorType.getConstructor().getParameters();
argTypesArray[i] = ctorType.getParameterTypes();
}
return listConstructorTypes;
}
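// Parses an anonymous inner class on a Gosu or Java base type, e.g. (illustrative Gosu):
//   new Runnable() { override function run() { print( "hi" ) } }
// A synthetic inner class is created, wired to the base type as super type or interface, and its
// body is compiled via GosuClassParser.parseAnonymousInnerClass().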
private IType parseAnonymousInnerClass( IType declaringClass, TypeLiteral typeLiteral, NewExpression newExpr, int state, int mark )
{
_tokenizer.restoreToMark( state );
newExpr.setAnonymousClass( true );
IGosuClassInternal gsDeclaringClass = null;
if( declaringClass instanceof IJavaType )
{
gsDeclaringClass = GosuClassProxyFactory.instance().create( declaringClass );
if( declaringClass.isParameterizedType() )
{
gsDeclaringClass = (IGosuClassInternal)gsDeclaringClass.getParameterizedType( declaringClass.getTypeParameters() );
}
}
else if( declaringClass instanceof IGosuClassInternal )
{
gsDeclaringClass = (IGosuClassInternal)declaringClass;
}
else
{
verify( newExpr, false, Res.MSG_BAD_ANONYMOUS_CLASS_DECLARATION );
}
if( gsDeclaringClass != null && typeLiteral != null )
{
declaringClass = TypeLord.makeDefaultParameterizedType( declaringClass );
ICompilableTypeInternal enclosingType = getCurrentEnclosingGosuClass();
// For now anonymous classes must be enclosed in a Gosu class.
if( verify( newExpr, enclosingType instanceof IGosuClassInternal, Res.MSG_ANONYMOUS_CLASS_NOT_ALLOWED_HERE ) )
{
int iNameOffset = typeLiteral.getLocation().getOffset();
int originaliBreakOk = _iBreakOk;
_iBreakOk = 0;
int originaliContinueOk = _iContinueOk;
_iContinueOk = 0;
try
{
_parseAnonymousInnerClass( declaringClass, gsDeclaringClass, enclosingType, iNameOffset, newExpr, mark );
}
finally
{
_iBreakOk = originaliBreakOk;
_iContinueOk = originaliContinueOk;
}
}
}
return declaringClass;
}
private void _parseAnonymousInnerClass( IType declaringClass, IGosuClassInternal gsDeclaringClass, ICompilableTypeInternal enclosingType, int iNameOffset, NewExpression newExpr, int mark )
{
//##todo:
// Note we do NOT special-case where we are parsing a field initializer, since
// the anonymous class should have access to all the class' fields. E.g.,
// class Foo {
//   static var Field1 = new Whatever() {
//     override function bar() : String {
//       return Field2
//     }
//   }
//   static var Field2 : String = "hello"
// }
String strAnonymousClass = IGosuClassInternal.ANONYMOUS_PREFIX + "_" + enclosingType.getAnonymousInnerClassCount();
boolean bTestClass = enclosingType instanceof IGosuClassInternal && ((IGosuClassInternal)enclosingType).isTestClass();
InnerClassFileSystemSourceFileHandle innerSfh = new InnerClassFileSystemSourceFileHandle(ClassType.Class, enclosingType.getName(), strAnonymousClass, bTestClass );
innerSfh.setOffset( iNameOffset );
innerSfh.setMark( mark );
IGosuClassInternal innerGsClass = (IGosuClassInternal)enclosingType.getTypeLoader().makeNewClass( innerSfh );
((IGosuClassInternal)enclosingType).addInnerClass(innerGsClass);
if( declaringClass != null )
{
if( declaringClass.isInterface() )
{
innerGsClass.addInterface( TypeLord.makeDefaultParameterizedType( declaringClass ) );
}
else
{
innerGsClass.setSuperType( TypeLord.makeDefaultParameterizedType( declaringClass ) );
if( declaringClass.isEnum() )
{
innerGsClass.setEnum();
}
}
}
innerGsClass.setEnclosingType( enclosingType );
innerGsClass.setNamespace( enclosingType.getNamespace() );
innerGsClass.createNewParseInfo();
if( isParsingStaticFeature() )
{
innerGsClass.markStatic();
}
if( declaringClass.isInterface() )
{
innerGsClass.addInterface( declaringClass );
newExpr.setType( innerGsClass );
}
else
{
innerGsClass.setSuperType( declaringClass );
if( declaringClass.isEnum() )
{
innerGsClass.setEnum();
}
newExpr.setType( innerGsClass );
}
if( newExpr.getConstructor() != null )
{
GosuConstructorInfo ci = getGsConstructorInfo( newExpr.getConstructor(), gsDeclaringClass );
verify( newExpr, innerGsClass.getParseInfo().addAnonymousConstructor( _symTable, ci ), Res.MSG_NO_CONSTRUCTOR_FOUND_FOR_CLASS, gsDeclaringClass.getName() );
}
innerGsClass.setCannotCaptureSymbols( isParsingBlock() ||
((IGosuClassInternal)innerGsClass.getEnclosingType()).isCannotCaptureSymbols() );
GosuClassParser.parseAnonymousInnerClass( this, innerGsClass );
if( newExpr.getConstructor() != null && !innerGsClass.getTypeInfo().getConstructors().isEmpty() )
{
IConstructorInfo innerCtor = innerGsClass.getTypeInfo().getConstructors().get( 0 );
newExpr.setConstructor( innerCtor );
}
if( !getContextType().isMethodScoring() )
{
popStatement();
}
if( declaringClass.isInterface() )
{
//noinspection unchecked
List<IConstructorInfo> ctors = (List<IConstructorInfo>)innerGsClass.getTypeInfo().getConstructors();
if( ctors.size() > 0 )
{
newExpr.setConstructor( ctors.get( 0 ) );
}
}
}
public boolean isParsingStaticFeature()
{
return !_parsingStaticFeature.isEmpty() && _parsingStaticFeature.peek();
}
public void pushParsingStaticMember( Boolean bParsingStaticFeature )
{
_parsingStaticFeature.push( bParsingStaticFeature );
}
public void popParsingStaticMember()
{
_parsingStaticFeature.pop();
}
public boolean isParsingAbstractConstructor()
{
return !_parsingAbstractConstructor.isEmpty() && _parsingAbstractConstructor.peek();
}
public void pushParsingAbstractConstructor( Boolean bParsingAbstractConstructor )
{
_parsingAbstractConstructor.push( bParsingAbstractConstructor );
}
public void popParsingAbstractConstructor()
{
_parsingAbstractConstructor.pop();
}
private IConstructorInfo getConstructor( IType instanceClass )
{
IConstructorInfo firstCtor = null;
if( instanceClass.getTypeInfo() instanceof IRelativeTypeInfo)
{
if( !instanceClass.isInterface() )
{
List<? extends IConstructorInfo> ctors = ( (IRelativeTypeInfo) instanceClass.getTypeInfo() ).getConstructors( instanceClass );
if ( ! ctors.isEmpty() ) {
firstCtor = ctors.get( 0 );
}
}
}
else
{
if( !instanceClass.isInterface() )
{
ITypeInfo typeInfo = instanceClass.getTypeInfo();
if (typeInfo == null) {
firstCtor = null;
} else {
List<? extends IConstructorInfo> ctors = typeInfo.getConstructors();
if ( ! ctors.isEmpty() ) {
firstCtor = ctors.get( 0 );
}
}
}
}
return firstCtor;
}
private void verifyConstructorIsAccessible( IType instanceClass, NewExpression e, IConstructorType constructorType, boolean bAnonymous )
{
if( e.getType() instanceof ErrorType )
{
return;
}
if( constructorType != null && !(constructorType.getConstructor() instanceof DynamicConstructorInfo) )
{
ITypeInfo typeInfo = instanceClass.getTypeInfo();
List<? extends IConstructorInfo> accessibleCtors = getGosuClass() != null && typeInfo instanceof IRelativeTypeInfo
? ((IRelativeTypeInfo)typeInfo).getConstructors( getGosuClass() )
: typeInfo.getConstructors();
//noinspection SuspiciousMethodCalls
verify( e,
accessibleCtors.contains( constructorType.getConstructor() ) ||
(bAnonymous && constructorType.getConstructor().isProtected()),
Res.MSG_CTOR_HAS_XXX_ACCESS, getModifierString( constructorType ) );
}
}
private String getModifierString( IConstructorType constructorType )
{
return constructorType.getConstructor().isPrivate()
? "private"
: constructorType.getConstructor().isInternal()
? "internal"
: constructorType.getConstructor().isProtected()
? "protected"
: "public";
}
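// Object initializer syntax, e.g. (illustrative Gosu, names hypothetical):
//   new Person() { :Name = "Fred", :Age = 40 }
// Each ":<property> = <expression>" pair is parsed by parseInitializerAssignment(); duplicate
// property names are reported as redundant initializers.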
private boolean parseObjectInitializer( IType objectType )
{
int iOffset = _tokenizer.getTokenStart();
int iLineNum = _tokenizer.getLineNumber();
int iColumn = getTokenizer().getTokenColumn();
boolean b = _parseObjectInitializer( objectType );
setLocation( iOffset, iLineNum, iColumn );
return b;
}
private boolean _parseObjectInitializer( IType objectType )
{
if( match( null, '{' ) )
{
_doParseObjectInitializer(objectType);
verify( peekExpression(), match( null, '}' ), Res.MSG_EXPECTING_CLOSE_BRACE_FOR_INITIALIZER );
return true;
}
else
{
return false;
}
}
private void _doParseObjectInitializer(IType objectType) {
ObjectInitializerExpression oie = new ObjectInitializerExpression();
do
{
if( parseInitializerAssignment( objectType ) )
{
InitializerAssignment newAssignment = (InitializerAssignment)popStatement();
for( InitializerAssignment existing : oie.getInitializers() )
{
if( existing.getPropertyName() != null && existing.getPropertyName().equals( newAssignment.getPropertyName() ) )
{
newAssignment.addParseException( Res.MSG_REDUNTANT_INITIALIZERS, existing.getPropertyName() );
}
}
oie.add( newAssignment );
}
else
{
verify( oie, false, Res.MSG_EXPECTING_NAME_VALUE_PAIR, makeLightweightParserState() );
}
} while( match( null, ',' ) );
oie.setType( objectType );
pushExpression( oie );
}
private boolean parseInitializerAssignment( IType objectType )
{
int iOffset = _tokenizer.getTokenStart();
int iLineNum = _tokenizer.getLineNumber();
int iColumn = _tokenizer.getTokenColumn();
if( match( null, ":", SourceCodeTokenizer.TT_OPERATOR ) )
{
//handle two colons in a row with error message to preserve parse tree
//(helps intellisense)
boolean bFoundExtraColon = false;
if( match( null, ":", SourceCodeTokenizer.TT_OPERATOR ) )
{
bFoundExtraColon = true;
}
parseInitializerIdentifier( objectType );
Identifier identifier = (Identifier)popExpression();
InitializerAssignment se = new InitializerAssignment( objectType, identifier.getSymbol().getName() );
IPropertyInfo propertyInfo = null;
try
{
if( se.getPropertyName() != null )
{
propertyInfo = BeanAccess.getPropertyInfo( objectType, se.getPropertyName(), null, this, getVisibilityConstraint() );
}
}
catch( ParseException e )
{
se.addParseException( e );
}
IType type = ErrorType.getInstance();
if( propertyInfo != null )
{
verifyCase( se, se.getPropertyName(), propertyInfo.getName(), Res.MSG_PROPERTY_CASE_MISMATCH, false );
if( !JavaTypes.COLLECTION().isAssignableFrom( propertyInfo.getFeatureType() ) &&
!JavaTypes.MAP().isAssignableFrom( propertyInfo.getFeatureType() ) )
{
try
{
verifyPropertyWritable( objectType, se.getPropertyName(), true );
}
catch( Exception ex )
{
//noinspection ThrowableResultOfMethodCallIgnored
se.addParseException( ParseException.wrap( ex, makeFullParserState() ) );
}
}
type = propertyInfo.getFeatureType();
}
identifier.setType( type );
verify( se, match( null, "=", SourceCodeTokenizer.TT_OPERATOR ), Res.MSG_EQUALS_FOR_INITIALIZER_EXPR );
verify( se, !bFoundExtraColon, Res.MSG_ONLY_ONE_COLON_IN_INITIALIZERS );
parseExpression( new ContextType( type ) );
Expression value = popExpression();
if( value.hasParseExceptions() )
{
value.getParseExceptions().get( 0 ).setExpectedType( type );
}
se.setRhs( value );
pushStatement( se );
setLocation( iOffset, iLineNum, iColumn );
return true;
}
return false;
}
private void parseInitializerIdentifier( IType objectType )
{
int iOffset = _tokenizer.getTokenStart();
int iLineNum = _tokenizer.getLineNumber();
int iColumn = _tokenizer.getTokenColumn();
Identifier i = new Identifier();
String strToken = null;
int mark = getTokenizer().mark();
if( verify( i, match( null, SourceCodeTokenizer.TT_WORD ), Res.MSG_EXPECTING_NAME_PROPERTY ) )
{
strToken = getTokenizer().getTokenAt( mark ).getStringValue();
}
i.setSymbol( new InitializerSymbol( strToken, objectType ), null);
i.setType( objectType );
pushExpression( i );
setLocation( iOffset, iLineNum, iColumn );
}
private GosuConstructorInfo getGsConstructorInfo( IConstructorInfo ci, IGosuClassInternal gsInstanceClass )
{
if( ci instanceof GosuConstructorInfo )
{
return (GosuConstructorInfo)ci;
}
List<IType> argTypes = new ArrayList<>( 2 );
if (ci != null) {
for( IParameterInfo pi : ci.getParameters() )
{
argTypes.add( pi.getFeatureType() );
}
}
return (GosuConstructorInfo) gsInstanceClass.getTypeInfo().getConstructor( gsInstanceClass, argTypes.toArray(new IType[argTypes.size()]));
}
private boolean isConcreteInitializableType( IType type )
{
return isInitializableType( type ) && !type.isAbstract() && !type.isInterface();
}
private boolean isInitializableType( IType type )
{
return type != null &&
(JavaTypes.COLLECTION().isAssignableFrom( type ) ||
JavaTypes.MAP().isAssignableFrom( type ));
}
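// Enforces two rules for constructing a non-static inner class: the construction site must be a
// non-static context, and the constructing class must be, or be enclosed by, the inner class'
// enclosing class. Static inner classes and non-inner classes are exempt.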
private void verifyCanConstructInnerClassFromCallSite( NewExpression e, IType innerClass )
{
if( !(innerClass instanceof IGosuClassInternal) && !(innerClass instanceof IJavaType) )
{
return;
}
if( Modifier.isStatic( innerClass.getModifiers() ) )
{
return;
}
IType innersEnclosingClass = innerClass.getEnclosingType();
if( innersEnclosingClass == null )
{
// Not an inner class
return;
}
IGosuClassInternal constructingFromClass = getScriptPart() != null && getScriptPart().getContainingType() instanceof IGosuClass
? (IGosuClassInternal)getScriptPart().getContainingType()
: null;
verify( e, isConstructingNonStaticInnerClassFromNonStaticContext( innersEnclosingClass, constructingFromClass ),
Res.MSG_CANNOT_INSTANTIATE_NON_STATIC_CLASSES_HERE, innersEnclosingClass.getName(), innerClass.getRelativeName() );
verify( e, constructingFromClass != null &&
isNonStaticInnerClassConstructableFromCurrentFunction( innersEnclosingClass, constructingFromClass ),
Res.MSG_MUST_BE_IN_OUTER_TO_CONSTRUCT_INNER, innersEnclosingClass.getName(), innerClass.getRelativeName() );
}
private boolean isConstructingNonStaticInnerClassFromNonStaticContext(IType innersEnclosingClass, IGosuClassInternal constructingFromClass)
{
if( !innersEnclosingClass.isAssignableFrom( constructingFromClass ) )
{
IGosuClassInternal csr = constructingFromClass;
while( csr != null && csr != innersEnclosingClass )
{
if( csr.isStatic() )
{
return false;
}
csr = (IGosuClassInternal) csr.getEnclosingType();
}
}
return true;
}
private boolean isNonStaticInnerClassConstructableFromCurrentFunction(IType innersEnclosingClass, IGosuClassInternal constructingFromClass)
{
if( !innersEnclosingClass.isAssignableFrom( constructingFromClass ) )
{
IGosuClassInternal csr = constructingFromClass;
while( csr != null && csr != innersEnclosingClass )
{
csr = (IGosuClassInternal) csr.getEnclosingType();
}
if( csr != innersEnclosingClass )
{
// current function's declaring class must be enclosed by inner class' declaring class
return false;
}
}
return true;
}
private void _parseInitializerExpression( ContextType type )
{
if( type != null && JavaTypes.COLLECTION().isAssignableFrom( type.getType() ) )
{
parseCollectionInitializerList( type.getType() );
}
else if( type != null && JavaTypes.MAP().isAssignableFrom( type.getType() ) )
{
parseMapInitializerList( type );
}
else if( type.getType().isDynamic() )
{
parseCollectionInitializerList( type.getType() );
}
else
{
BadInitializerExpression be = new BadInitializerExpression();
pushExpression(be);
}
}
private IConstructorInfo getImplicitConstructor(IType type) {
if (type instanceof IErrorType ) {
return null;
}
IConstructorInfo constructor = null;
if (!type.isAbstract()) {
ITypeInfo typeInfo = type.getTypeInfo();
if (typeInfo instanceof IRelativeTypeInfo) {
constructor = ((IRelativeTypeInfo) typeInfo).getConstructor(getGosuClass(), IType.EMPTY_ARRAY);
} else {
constructor = typeInfo.getConstructor();
}
}
return constructor;
}
/*
* <i>collection-init-list</i>
* <expression>
* <collection-init-list> , <expression>
*/
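// E.g. (illustrative Gosu), with a Collection context type such as List<Integer>:
//   var list : List<Integer> = { 1, 2, 3 }
// Elements are parsed against the collection's component type, or Object for a raw collection type.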
private void parseCollectionInitializerList( IType type )
{
CollectionInitializerExpression lie = new CollectionInitializerExpression();
IType componentType;
if( type.isDynamic() )
{
componentType = type.getComponentType();
}
else
{
IType listType = TypeLord.findParameterizedTypeInHierarchy( type, JavaTypes.COLLECTION() );
if( listType.isParameterizedType() && !listType.isGenericType() )
{
componentType = listType.getTypeParameters()[0];
}
else
{
componentType = JavaTypes.OBJECT();
}
}
do
{
parseExpression( new ContextType( componentType ) );
Expression e = popExpression();
lie.add( e );
}
while( match( null, ',' ) );
lie.setType( type );
pushExpression( lie );
}
/*
* <i>map-init-list</i>
* <key-expression> <b>-></b> <value-expression> ;
* <map-init-list> , <expression>
*/
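// E.g. (illustrative Gosu), with a Map context type such as Map<String, Integer>:
//   var scores : Map<String, Integer> = { "a" -> 1, "b" -> 2 }
// Keys and values are parsed against the map's type parameters, or Object for a raw Map.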
private void parseMapInitializerList( ContextType type )
{
MapInitializerExpression mie = new MapInitializerExpression();
IType listType = TypeLord.findParameterizedTypeInHierarchy( type.getType(), JavaTypes.MAP() );
IType keyType = JavaTypes.OBJECT();
IType valueType = JavaTypes.OBJECT();
if( listType.isParameterizedType() )
{
keyType = listType.getTypeParameters()[0];
valueType = listType.getTypeParameters()[1];
}
do
{
parseExpression( new ContextType( keyType ) );
Expression key = popExpression();
Expression value;
if( verify( key, match( null, "->", SourceCodeTokenizer.TT_OPERATOR ), Res.MSG_EXPECTING_ARROW_AFTER_MAP_KEY ) )
{
parseExpression( new ContextType( valueType ) );
value = popExpression();
}
else
{
value = new NullExpression();
}
mie.add( key, value );
}
while( match( null, ',' ) );
pushExpression( mie );
}
/*
* <i>array-value-list</i>
* <expression>
* <array-value-list> , <expression>
*/
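// E.g. (illustrative Gosu): the value list in `new String[] { "x", "y" }`. Type-variable component
// types are first widened to their bounding types before each element expression is parsed.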
List<Expression> parseArrayValueList( IType componentType )
{
while( componentType instanceof TypeVariableType )
{
componentType = ((TypeVariableType)componentType).getBoundingType();
}
List<Expression> valueExpressions = new ArrayList<>();
do
{
parseExpression( new ContextType( componentType ) );
Expression e = popExpression();
valueExpressions.add( e );
}
while( match( null, ',' ) );
return valueExpressions;
}
private void parseIndirectMemberAccess( int iOffset, int iLineNum, int iColumn )
{
parseIndirectMemberAccess( iOffset, iLineNum, iColumn, false );
}
private void parseIndirectMemberAccess( int iOffset, int iLineNum, int iColumn, boolean bParsingTypeLiteralOnly )
{
do
{
if( !_parseIndirectMemberAccess( bParsingTypeLiteralOnly ) )
{
Expression expr = peekExpression();
if( !maybeReplaceErrantPackageExprWithEnumConstEpr( iOffset, iLineNum, iColumn, expr ) )
{
maybeReplacePackageExprWithTypeLiteral( iOffset, iLineNum, iColumn, expr );
verify( peekExpression(), !(peekExpression().getType() instanceof NamespaceType), Res.MSG_EXPECTING_TYPE_TO_FOLLOW_PACKAGE_NAME );
verify( peekExpression(), !(peekExpression() instanceof SuperAccess), Res.MSG_MEMBER_ACCESS_REQUIRED_FOR_SUPER );
}
break;
}
setLocation( iOffset, iLineNum, iColumn );
Expression expr = peekExpression();
if( expr instanceof MemberAccess || expr instanceof BeanMethodCallExpression )
{
IType inferType = _ctxInferenceMgr.infer( expr );
if( inferType != null )
{
popExpression();
expr = possiblyWrapWithImplicitCoercion( expr, inferType );
pushExpression( expr );
}
}
} while( true );
}
private void maybeReplacePackageExprWithTypeLiteral( int iOffset, int iLineNum, int iColumn, Expression expr )
{
if( expr instanceof Identifier && expr.getType() instanceof NamespaceType )
{
String strNamespace = expr.getType().getName();
if( getNamespace() != null )
{
popExpression(); // Pop existing expression; we're going to transform it to a type literal
tryToMakeTypeLiteral( new String[] {strNamespace}, iOffset, iLineNum, iColumn, strNamespace, expr );
}
}
}
private boolean maybeReplaceErrantPackageExprWithEnumConstEpr( int iOffset, int iLineNum, int iColumn, Expression expr )
{
if( expr instanceof Identifier && expr.getType() instanceof NamespaceType )
{
MemberAccess enumConstExpr = parseUnqualifiedEnumConstant( ((Identifier)expr).getSymbol().getName() );
if( enumConstExpr != null )
{
// Set the errant expression's location so that its subordinate expressions are not
// included in the expression we try next...
setLocation( iOffset, iLineNum, iColumn );
enumConstExpr.setStartOffset( iOffset );
getLocationsList().remove( peekExpression().getLocation() );
popExpression();
pushExpression( enumConstExpr );
return true;
}
}
return false;
}
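// Handles one step of indirect access on the expression currently on the stack:
//   .  ?.  *.     member access (normal, null-safe, expansion)
//   [ ]  ?[ ]     array/map index access (including the super[Type] form)
//   #             feature literals
//   ( )           invocation of a block-typed expression
// Returns false when the current token starts none of these.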
boolean _parseIndirectMemberAccess( boolean bParseTypeLiteralOnly )
{
Expression peekRootExpression = peekExpression();
Token token = getTokenizer().getCurrentToken();
int operatorLineNumber = token.getLine();
String value = token.getStringValue();
if( '.' == token.getType() ||
(token.getType() == SourceCodeTokenizer.TT_OPERATOR && value != null &&
("?.".equals( value ) ||
"*.".equals( value ))) )
{
getTokenizer().nextToken();
MemberAccessKind kind = MemberAccessKind.getForOperator( value );
Expression expression = popExpression();
verify( expression, kind != MemberAccessKind.EXPANSION || TypeLord.getExpandableComponentType( expression.getType() ) != null,
Res.MSG_TYPE_IS_NOT_ITERABLE, expression.getType().getName() );
Token T = new Token();
verify( expression, match( T, SourceCodeTokenizer.TT_WORD ) || match( T, SourceCodeTokenizer.TT_KEYWORD ), Res.MSG_EXPECTING_MEMBER_ACCESS_PATH );
parseMemberAccess( expression, kind, T.getTokenStart(), T._strValue == null ? "" : T._strValue,
makeLazyLightweightParserState(), bParseTypeLiteralOnly );
verifyNonVoidExpression( peekExpression() );
}
else if( !bParseTypeLiteralOnly && parseFeatureLiteral( token, peekRootExpression ) )
{
// good
}
else if( !bParseTypeLiteralOnly &&
('[' == token.getType() ||
(token.getType() == SourceCodeTokenizer.TT_OPERATOR &&
"?[".equals( value ))) )
{
getTokenizer().nextToken();
Expression rootExpression = popExpression();
IType rootType = rootExpression.getType();
IType indexType = ArrayAccess.supportsArrayAccess( rootType )
? JavaTypes.pINT()
: MapAccess.supportsMapAccess( rootType )
? MapAccess.getKeyType( rootType )
: null;
boolean bDynamicRoot = rootType.isDynamic();
if( bDynamicRoot )
{
parseExpression();
}
else
{
parseExpression( new ContextType( indexType ) );
}
Expression indexExpression = popExpression();
Expression arrayAccess;
// Assume null-safety for backward compatibility in non-open-source versions, otherwise make it explicit with "?["
boolean bNullSafe = !ILanguageLevel.Util.STANDARD_GOSU() || value != null && "?[".equals( value );
if( ArrayAccess.supportsArrayAccess( rootType ) )
{
ArrayAccess aa = new ArrayAccess();
aa.setRootExpression( rootExpression );
if( verify( indexExpression, indexExpression.getType() == JavaTypes.pINT() ||
indexExpression.getType() == JavaTypes.INTEGER() ||
bDynamicRoot,
Res.MSG_ARRAY_INDEX_MUST_BE_INT ) )
{
aa.setMemberExpression( indexExpression );
}
aa.setNullSafe( bNullSafe );
pushExpression( aa );
arrayAccess = aa;
}
else if( MapAccess.supportsMapAccess( rootType ) )
{
MapAccess ma = new MapAccess();
ma.setRootExpression( rootExpression );
ma.setKeyExpression( indexExpression );
ma.setNullSafe( bNullSafe );
pushExpression( ma );
arrayAccess = ma;
}
else if( isSuperCall( rootExpression, indexExpression ) )
{
SuperAccess ma = new SuperAccess();
ma.setRootExpression( (Identifier)rootExpression );
ma.setKeyExpression( (TypeLiteral)indexExpression );
IType superType = verifySuperTypeIsDeclaredInCompilingClass( (TypeLiteral)indexExpression );
ma.setType( superType );
pushExpression( ma );
arrayAccess = ma;
}
else
{
verify( rootExpression, BeanAccess.isBeanType( rootExpression.getType() ) ||
(rootExpression.getType() instanceof MetaType), Res.MSG_EXPECTING_BEAN_TYPE_WITH_REFLECTION_OPERATOR );
verify( indexExpression,
JavaTypes.CHAR_SEQUENCE().isAssignableFrom( indexExpression.getType() ),
Res.MSG_PROPERTY_REFLECTION_ONLY_WITH_STRINGS );
MemberAccess ma = new MemberAccess();
ma.setRootExpression( rootExpression );
ma.setMemberExpression( indexExpression );
ma.setType( JavaTypes.OBJECT() );
ma.setMemberAccessKind( bNullSafe ? MemberAccessKind.NULL_SAFE : MemberAccessKind.NORMAL );
pushExpression( ma );
arrayAccess = ma;
}
verify( arrayAccess, match( null, ']' ), Res.MSG_EXPECTING_BRACKET_TO_CLOSE_DYNAMIC_MEMBER_ACCESS );
}
else if( peekExpression().getType() instanceof IBlockType && match( null, '(' ) )
{
IBlockType iBlockType = (IBlockType)peekExpression().getType();
BlockInvocation bi = new BlockInvocation( popExpression() );
boolean bNoArgsProvided;
if( !(bNoArgsProvided = match( null, ')' )) || iBlockType.hasOptionalParams() )
{
MethodScore score = parseArgumentList( peekRootExpression.getType(), bi, Collections.singletonList( iBlockType ), IType.EMPTY_ARRAY, true, bNoArgsProvided );
bi.setNamedArgOrder( score.getNamedArgOrder() );
bi.setArgs( score.getArguments() );
verify( bi, bNoArgsProvided || match( null, ')' ), Res.MSG_EXPECTING_FUNCTION_CLOSE );
}
verify( bi, bi.getArgs().size() == iBlockType.getParameterTypes().length, Res.MSG_WRONG_NUM_OF_ARGS,
iBlockType.getParameterTypes().length );
bi.setType( iBlockType.getReturnType() );
verifyNonVoidExpression( bi );
pushExpression( bi );
}
else
{
return false;
}
// Reroot the root expression under the indirect access
if( peekRootExpression != null && !peekRootExpression.hasParseExceptions() )
{
ParseTree rootLocation = peekRootExpression.getLocation();
if( rootLocation != null )
{
getLocationsList().remove( peekRootExpression.getLocation() );
setLocation( rootLocation.getOffset(), rootLocation.getLineNum(), rootLocation.getColumn() );
peekExpression().getLocation().addChild( peekRootExpression.getLocation() );
}
}
setOperatorLineNumber(peekExpression(), operatorLineNumber);
return true;
}
private IType verifySuperTypeIsDeclaredInCompilingClass( TypeLiteral superTypeLiteral )
{
verify( superTypeLiteral, !superTypeLiteral.getContainedParsedElementsByType( ITypeParameterListClause.class, null ), Res.MSG_PARAMETERIZED_TYPE_NOT_ALLOWED_HERE );
IType type = TypeLord.getPureGenericType( superTypeLiteral.getType().getType() );
ICompilableTypeInternal gosuClass = getGosuClass();
IType superType = gosuClass.getSupertype();
if( superType != null && TypeLord.getPureGenericType( superType ) == type )
{
return superType;
}
else if( superType == null && type == JavaTypes.OBJECT() )
{
return type;
}
for( IType iface : gosuClass.getInterfaces() )
{
if( TypeLord.getPureGenericType( iface ) == type )
{
return iface;
}
}
addError( superTypeLiteral, Res.MSG_NOT_A_SUPERTYPE, type );
return type;
}
private boolean isSuperCall( Expression rootExpression, Expression indexExpression )
{
return indexExpression instanceof TypeLiteral &&
rootExpression instanceof Identifier &&
((Identifier)rootExpression).getSymbol().getName().equals( Keyword.KW_super.getName() );
}
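// Feature literal syntax, e.g. (illustrative Gosu, names hypothetical):
//   Person#Name           -- property reference
//   Person#foo(String)    -- method reference, resolved by parameter types
//   Person#construct()    -- constructor reference
// Chaining is only allowed through properties, and static features must be rooted at a type literal.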
private boolean parseFeatureLiteral( Token token, Expression root )
{
Token T = new Token();
if( SourceCodeTokenizer.TT_OPERATOR == token.getType() && "#".equals( token.getStringValue() ) )
{
getTokenizer().nextToken();
if( root != popExpression() )
{
throw new IllegalStateException();
}
FeatureLiteral fle = new FeatureLiteral( root );
boolean foundWord = verify( fle, match( T, SourceCodeTokenizer.TT_WORD ) || match( T, Keyword.KW_construct ),
Res.MSG_FL_EXPECTING_FEATURE_NAME );
if( foundWord )
{
if( match( null, "<", SourceCodeTokenizer.TT_OPERATOR, true ) )
{
parseErrantFeatureLiteralParameterization( fle );
}
if( match( null, '(' ) )
{
if( !match( null, ')' ) )
{
MethodScore score = parseArgumentList( root.getType(), fle, fle.getFunctionTypes( T._strValue ), IType.EMPTY_ARRAY, true, false );
if( allTypeLiterals( score.getArguments() ) )
{
//noinspection ThrowableResultOfMethodCallIgnored
fle.removeParseException( Res.MSG_AMBIGUOUS_METHOD_INVOCATION );
List<IType> types = evalTypes( score.getArguments() );
if( T._strValue.equals( Keyword.KW_construct.getName() ) )
{
verify( fle, fle.resolveConstructor( types ), Res.MSG_FL_CONSTRUCTOR_NOT_FOUND, StringUtil.join( ",", types ) );
}
else
{
verify( fle, fle.resolveMethod( T._strValue, types ), Res.MSG_FL_METHOD_NOT_FOUND, T._strValue, StringUtil.join( ",", types ) );
}
}
else
{
IInvocableType funcType = score.getRawFunctionType();
if( funcType == null || funcType.getParameterTypes().length != score.getArguments().size() )
{
fle.setFeature( ErrorType.getInstance().getTypeInfo().getMethod( T._strValue ), score.getArguments() );
verify( fle, false, Res.MSG_FL_METHOD_NOT_FOUND, T._strValue, "" );
}
else if( funcType instanceof IConstructorType )
{
fle.setFeature( ((IConstructorType)funcType).getConstructor(), score.getArguments() );
}
else
{
fle.setFeature( ((IFunctionType)funcType).getMethodInfo(), score.getArguments() );
}
}
verify( fle, match( null, ')' ), Res.MSG_FL_EXPECTING_RIGHT_PAREN );
}
else
{
if( T._strValue.equals( Keyword.KW_construct.getName() ) )
{
verify( fle, fle.resolveConstructor( Collections.<IType>emptyList() ), Res.MSG_FL_CONSTRUCTOR_NOT_FOUND, "" );
}
else
{
verify( fle, fle.resolveMethod( T._strValue, Collections.<IType>emptyList() ), Res.MSG_FL_METHOD_NOT_FOUND, T._strValue, "" );
}
}
}
else
{
boolean propResolved = fle.resolveProperty( T._strValue );
if( !propResolved )
{
verify( fle, propResolved, Res.MSG_FL_PROPERTY_NOT_FOUND, T._strValue );
}
}
}
else
{
fle.setType( ErrorType.getInstance() );
}
if( root instanceof FeatureLiteral )
{
verify( fle, fle.isPropertyLiteral(), Res.MSG_FL_ONLY_PROPERTIES_MAY_BE_CHAINED );
verify( fle, ((FeatureLiteral) root).isPropertyLiteral(), Res.MSG_FL_ONLY_PROPERTIES_MAY_BE_CHAINED );
}
if( fle.isStaticish() && !fle.hasParseExceptions() )
{
verify( fle, root instanceof TypeLiteral, Res.MSG_FL_STATIC_FEATURES_MUST_BE_REFERENCED_FROM_THEIR_TYPES );
}
pushExpression( fle );
return true;
}
else
{
return false;
}
}
private void parseErrantFeatureLiteralParameterization(FeatureLiteral fle) {
while( verify( fle, parseTypeLiteral(), null ) )
{
popExpression(); // TypeLiteral expr
if( !match( null, ',' ) )
{
break;
}
}
verify( fle, match( null, ">", SourceCodeTokenizer.TT_OPERATOR, true ), Res.MSG_FL_EXPECTING_RIGHT_CARET );
verify( fle, false, Res.MSG_FL_GENERIC_FUNCTION_REFERENCES_NOT_YET_SUPPORTED );
}
private List<IType> evalTypes(List<IExpression> arguments) {
List<IType> types = new ArrayList<>();
for( IExpression expression : arguments)
{
expression.clearParseExceptions();
expression.clearParseWarnings();
if( expression instanceof ImplicitTypeAsExpression)
{
expression = ((ImplicitTypeAsExpression)expression).getLHS();
}
types.add( ((TypeLiteral)expression).evaluate() );
}
return types;
}
private boolean allTypeLiterals(List<IExpression> args) {
boolean allTypeLiterals = true;
for( IExpression arg : args)
{
if( !(arg instanceof TypeLiteral || (arg instanceof ImplicitTypeAsExpression && ((ImplicitTypeAsExpression)arg).getLHS() instanceof TypeLiteral)) )
{
allTypeLiterals = false;
}
}
return allTypeLiterals;
}
private void setOperatorLineNumber( Expression expression, int operatorLineNumber )
{
if( expression instanceof IHasOperatorLineNumber )
{
((IHasOperatorLineNumber)expression).setOperatorLineNumber( operatorLineNumber );
}
}
boolean parseNameOrMethodCall( Token token )
{
if( token.getType() == SourceCodeTokenizer.TT_KEYWORD &&
(Keyword.KW_true == token.getKeyword() ||
Keyword.KW_false == token.getKeyword() ||
Keyword.KW_NaN == token.getKeyword() ||
Keyword.KW_Infinity == token.getKeyword() ||
Keyword.KW_null == token.getKeyword()) )
{
return false;
}
int iOffset = token.getTokenStart();
int iLineNum = token.getLine();
int iColumn = token.getTokenColumn();
boolean bRet = _parseNameOrMethodCall( token );
if( bRet )
{
verifyNonVoidExpression(peekExpression());
setLocation( iOffset, iLineNum, iColumn );
}
Expression expr = peekExpression();
if( expr instanceof Identifier )
{
IType inferType = _ctxInferenceMgr.infer( expr );
if( inferType != null )
{
Identifier ma = (Identifier)popExpression();
expr = possiblyWrapWithImplicitCoercion( ma, inferType );
//ma.setType( inferType );
pushExpression( expr );
}
}
return bRet;
}
boolean _parseNameOrMethodCall( Token token )
{
int iOffset = token.getTokenStart();
int iLineNum = token.getLine();
int iColumn = token.getTokenColumn();
LazyLightweightParserState state = makeLazyLightweightParserState(); //capture position of word for error reporting
int markBefore = getTokenizer().mark();
if( isWordOrValueKeyword( token ) || matchPrimitiveType( true ) )
{
getTokenizer().nextToken();
int markBeforeTypeArgs = _tokenizer.mark();
String[] strToken = new String[] {getTokenizer().getTokenAt( markBefore ).getStringValue()};
MethodCallExpression e = new MethodCallExpression();
IType[] typeParameters = parsePossibleFunctionParameterization( strToken[0], e );
int markAfterTypeArgs = _tokenizer.mark();
int iLocBeforeTypeArgs = _locations.size();
String strFunction = strToken[0];
ISymbol functionSymbol = possiblyResolveFunctionSymbol( e, strFunction );
// Only parse the function symbol as a method invocation if it is not a block. Blocks parse as identifiers
// and are then invoked via indirect BlockInvocation expressions
if( !isBlockSym( functionSymbol ) && !isInSeparateStringTemplateExpression() && match( null, '(' ) )
{
parseMethodCall( strToken, iOffset, iLineNum, iColumn, state, e, typeParameters, strFunction, functionSymbol, markBeforeTypeArgs, iLocBeforeTypeArgs, markAfterTypeArgs );
}
else
{
token = getTokenizer().getCurrentToken();
if( !Keyword.KW_super.equals( strToken[0] ) ||
token.getType() == '.' ||
"#".equals( token.getStringValue() ) ||
token.getType() == '[' )
{
parseIdentifierOrTypeLiteralOrEnumConstant( strToken, iOffset, iLineNum, iColumn );
}
else
{
getTokenizer().restoreToMark( markBefore );
return false;
}
}
verify( peekExpression(), !GosuObjectUtil.equals( strFunction, Keyword.KW_this.toString() ) ||
!isParsingStaticFeature() ||
isParsingConstructor(), Res.MSG_CANNOT_REFERENCE_THIS_IN_STATIC_CONTEXT );
return true;
}
return false;
}
private void parseMethodCall( String[] t, int iOffset, int iLineNum, int iColumn, LazyLightweightParserState state, MethodCallExpression e, IType[] typeParameters, String strFunction, ISymbol functionSymbol, int markBeforeTypeArgs, int iLocBeforeTypeArgs, int markAfterTypeArgs )
{
int iLocationsCount = _locations.size();
parseMethodCall( t, state, e, typeParameters, strFunction, functionSymbol );
Expression expr = peekExpression();
if( expr.hasParseExceptions() )
{
maybeParseIdentifierAssumingOpenParenIsForParenthesizedExpr( t, iOffset, iLineNum, iColumn, state, e, typeParameters, strFunction, functionSymbol, markAfterTypeArgs, iLocationsCount );
}
expr = peekExpression();
if( hasParseExceptions( expr ) )
{
boolean parsed = maybeParseImpliedThisMethod( iOffset, iLineNum, iColumn, state, strFunction, markBeforeTypeArgs, iLocBeforeTypeArgs );
expr = peekExpression();
if( !parsed || hasParseExceptions( expr ) )
{
maybeParseImpliedStaticMethod( iOffset, iLineNum, iColumn, state, strFunction, markBeforeTypeArgs, iLocBeforeTypeArgs );
expr = peekExpression();
if( hasParseExceptions( expr ) )
{
// Failed to parse 'Xxx()' as implicitly qualified 'this.Xxx()', reparse as 'Xxx()'
backtrack( markAfterTypeArgs, iLocationsCount );
if( !match( null, '(' ) )
{
throw new IllegalStateException( "Position s/b on '('" );
}
parseMethodCall( t, state, new MethodCallExpression(), typeParameters, strFunction, functionSymbol );
}
}
}
}
private boolean hasParseExceptions( Expression expr )
{
if( expr == null )
{
return false;
}
if( expr.hasParseExceptions() )
{
return true;
}
if( expr instanceof IHasArguments )
{
IExpression[] args = ((IHasArguments)expr).getArgs();
if( args != null )
{
for( IExpression arg : args )
{
if( arg.hasParseExceptions() )
{
return true;
}
}
}
}
return false;
}
// Right now this is called to resolve a getter/setter call from a Java super class when getter/setter methods are overloaded
// e.g., void setFoo( String value ) {} and void setFoo( Integer value ) {} are defined in the super class.
private boolean maybeParseImpliedThisMethod( int iOffset, int iLineNum, int iColumn, LazyLightweightParserState state, String strFunction, int markBeforeTypeArgs, int iLocBeforeTypeArgs )
{
if( getTokenizerInstructor() != null || getScriptPart() != null && TemplateGenerator.GS_TEMPLATE_PARSED.equals( getScriptPart().getId() ) )
{
// templates can only access static members from their optional super class
return false;
}
ISymbol thisSym = getSymbolTable().getThisSymbolFromStackOrMap();
if( thisSym == null || getPropertyNameFromMethodNameIncludingSetter( strFunction ) == null )
{
return false;
}
backtrack( markBeforeTypeArgs, iLocBeforeTypeArgs );
// Make a synthetic 'this' qualifier
Identifier root = new Identifier();
root.setSymbol( thisSym, getSymbolTable() );
root.setSynthetic( true );
root.setType( thisSym.getType() );
pushExpression( root );
setLocation( getTokenizer().getTokenStart(), iLineNum, iColumn, true, true );
popExpression();
// Try to parse 'Xxx()' as implicitly qualified 'this.Xxx()'
parseMemberAccess( root, MemberAccessKind.NORMAL, iOffset, strFunction, state, false );
return true;
}
private void maybeParseImpliedStaticMethod( int iOffset, int iLineNum, int iColumn, LazyLightweightParserState state, String strFunction, int markBeforeTypeArgs, int iLocBeforeTypeArgs )
{
if( getTokenizerInstructor() != null || getScriptPart() != null && TemplateGenerator.GS_TEMPLATE_PARSED.equals( getScriptPart().getId() ) )
{
// templates can only access static members from their optional super class
return;
}
ICompilableTypeInternal gsClass = getGosuClass();
if( gsClass == null )
{
return;
}
backtrack( markBeforeTypeArgs, iLocBeforeTypeArgs );
// Make a synthetic 'MyClass' type literal qualifier
TypeLiteral root = new TypeLiteral( gsClass.getLiteralMetaType(), true );
root.setSynthetic( true );
pushExpression( root );
setLocation( getTokenizer().getTokenStart(), iLineNum, iColumn, true, true );
popExpression();
// Try to parse 'Xxx()' as implicitly qualified 'MyClass.Xxx()'
parseMemberAccess( root, MemberAccessKind.NORMAL, iOffset, strFunction, state, false );
}
private void maybeParseIdentifierAssumingOpenParenIsForParenthesizedExpr( String[] t, int iOffset, int iLineNum, int iColumn, IParserState state, MethodCallExpression e, IType[] typeParameters, String strFunction, ISymbol functionSymbol, int mark, int iLocationsCount )
{
if( !isOpenParenOnNextLine( mark ) )
{
return;
}
backtrack( mark, iLocationsCount );
parseIdentifierOrTypeLiteralOrEnumConstant( t, iOffset, iLineNum, iColumn );
Expression expr = peekExpression();
if( expr.hasParseExceptions() )
{
// Failed to parse as Identifier etc., reparse as Method call
backtrack( mark, iLocationsCount );
parseMethodCall( t, state, e, typeParameters, strFunction, functionSymbol );
}
}
void backtrack( int mark, int iLocationsCount )
{
backtrack( mark, iLocationsCount, popExpression() );
}
void backtrack( int mark, int iLocationsCount, Expression expr )
{
_tokenizer.restoreToMark( mark );
removeInnerClasses( expr );
removeLocationsFrom( iLocationsCount );
}
private void parseMethodCall( String[] t, IParserState state, MethodCallExpression e, IType[] typeParameters, String strFunction, ISymbol functionSymbol )
{
// MethodCall
if( functionSymbol == null && getGosuClass() != null )
{
functionSymbol = getGosuClass().getExternalSymbol( strFunction );
}
if( functionSymbol != null )
{
IType symbolType = functionSymbol.getType();
if( symbolType instanceof IFunctionType )
{
int mark = getTokenizer().mark();
int iLocationsCount = _locations.size();
parsePlainFunction( (IFunctionSymbol)functionSymbol );
verifyCase( peekExpression(), strFunction, functionSymbol.getName(), state, Res.MSG_FUNCTION_CASE_MISMATCH, false );
if( peekExpression().hasParseException( Res.MSG_WRONG_NUMBER_OF_ARGS_TO_FUNCTION ) )
{
// maybe there's a print() method defined locally...
backtrack( mark, iLocationsCount );
}
else
{
return;
}
}
if( symbolType.isDynamic() )
{
// dynamic symbol may need to be captured e.g., if its value is a closure
functionSymbol = maybeCaptureSymbol( e, functionSymbol );
parseDynamicFunction( (Symbol)functionSymbol );
verifyCase( peekExpression(), strFunction, functionSymbol.getName(), state, Res.MSG_FUNCTION_CASE_MISMATCH, false );
return;
}
}
int iParenStart = _tokenizer.getTokenStart();
e.setArgPosition( iParenStart );
List<IFunctionType> listFunctionTypes = null;
final boolean bThis = GosuObjectUtil.equals( strFunction, Keyword.KW_this.getName() );
boolean isRecursiveConstructorCall = false;
boolean bNoArgsProvided;
if( !(bNoArgsProvided = match( null, ')' )) ||
((listFunctionTypes = getFunctionTypesForName( strFunction )).size() == 1 && listFunctionTypes.get( 0 ).hasOptionalParams()) )
{
if( listFunctionTypes == null )
{
listFunctionTypes = getFunctionTypesForName( strFunction );
}
if( typeParameters != null )
{
listFunctionTypes = parameterizeFunctionTypes( e, typeParameters, listFunctionTypes );
}
MethodScore bestMethod = parseArgumentList( getGosuClass(), e, listFunctionTypes, typeParameters, true, bNoArgsProvided );
//noinspection SuspiciousToArrayCall
Expression[] eArgs = bestMethod.getArguments().toArray( new Expression[bestMethod.getArguments().size()] );
e.setArgs( eArgs );
boolean bMatched = bestMethod.isValid() && bestMethod.getRawFunctionType().getParameterTypes().length == eArgs.length;
for( Expression arg : eArgs )
{
if( arg.hasParseExceptions() )
{
bMatched = false;
break;
}
}
if( !bMatched )
{
if( listFunctionTypes.isEmpty() )
{
if( staticRefToNonStaticFunc( strFunction, eArgs ) )
{
verify( e, false, state, Res.MSG_CANNOT_CALL_NON_STATIC_METHOD_FROM_STATIC_CONTEXT, strFunction );
}
else
{
verify( e, false, state, Res.MSG_NO_SUCH_FUNCTION, strFunction );
e.setFunctionSymbol( new Symbol( strFunction, ErrorType.getInstance(), null ) );
}
}
}
if( bestMethod.isValid() )
{
// Did not parse as object literal
IFunctionType rawFunctionType = (IFunctionType)bestMethod.getRawFunctionType();
verifyArgCount( e, bestMethod.getArguments().size(), rawFunctionType );
if( !(bThis || GosuObjectUtil.equals( strFunction, Keyword.KW_super.getName() )) )
{
verifyCase( e, strFunction, rawFunctionType.getName(), state, Res.MSG_FUNCTION_CASE_MISMATCH, false );
}
else if( bThis )
{
final IType[] parameterTypes0 = peekParsingFunction().getParameterTypes();
final IType[] parameterTypes1 = rawFunctionType.getParameterTypes();
isRecursiveConstructorCall = parameterTypes0.length == parameterTypes1.length &&
Arrays.equals( parameterTypes0, parameterTypes1 );
}
e.setFunctionSymbol( getDFSForFunctionType( strFunction, bestMethod ) );
e.setNamedArgOrder( bestMethod.getNamedArgOrder() );
IFunctionType inferredFunctionType = (IFunctionType)bestMethod.getInferredFunctionType();
if( inferredFunctionType instanceof FunctionType )
{
((FunctionType)inferredFunctionType).setScriptPart( rawFunctionType.getScriptPart() );
}
e.setType( inferredFunctionType.getReturnType() );
e.setFunctionType( inferredFunctionType );
}
else
{
e.setType( ErrorType.getInstance() );
}
verify( e, bNoArgsProvided || match( null, ')' ), Res.MSG_EXPECTING_FUNCTION_CLOSE );
}
else
{
if( bThis &&
bNoArgsProvided &&
peekParsingFunction().getParameterTypes().length == 0 &&
getScriptPart().getContainingType().getName().endsWith( peekParsingFunction().getName() ) )
{
isRecursiveConstructorCall = true;
}
IFunctionSymbol function = (IFunctionSymbol)getSymbolTable().getSymbol( strFunction + "()" );
if( function == null )
{
function = (IFunctionSymbol)getSymbolTable().getSymbol( strFunction );
}
// check for property match *before* checking for overloading
if( supportPropertyAccessAsGetterCall( t, strFunction, function ) )
{
return;
}
if( function == null )
{
listFunctionTypes = getFunctionTypesForName( strFunction );
if( !listFunctionTypes.isEmpty() )
{
verifyArgCount( e, 0, listFunctionTypes.get( 0 ) );
}
List<IFunctionSymbol> possibleMatches = getDfsDeclsForFunction( strFunction );
if( !possibleMatches.isEmpty() )
{
function = possibleMatches.get( 0 );
e.setFunctionSymbol( function );
}
}
if( function == null || !(function.getType() instanceof IFunctionType) )
{
if( staticRefToNonStaticFunc( strFunction, new Expression[0] ) )
{
verify( e, false, state, Res.MSG_CANNOT_CALL_NON_STATIC_METHOD_FROM_STATIC_CONTEXT, strFunction );
}
else
{
verify( e, false, state, Res.MSG_NO_SUCH_FUNCTION, strFunction );
e.setFunctionSymbol( new Symbol( strFunction, ErrorType.getInstance(), null ) );
}
e.setType( ErrorType.getInstance() );
}
else
{
e.setFunctionSymbol( function );
verifyCase( e, strFunction, function.getDisplayName(), state, Res.MSG_FUNCTION_CASE_MISMATCH, false );
IFunctionType funcType = (IFunctionType)function.getType();
if( typeParameters != null )
{
if( verifyCanParameterizeType( e, funcType, typeParameters ) )
{
IFunctionType parameterizedFuncType = (IFunctionType)funcType.getParameterizedType( typeParameters );
if( verify( e, parameterizedFuncType != null, Res.MSG_COULD_NOT_PARAMETERIZE ) )
{
funcType = parameterizedFuncType;
}
}
}
assert funcType != null;
if( isEndOfExpression() )
{
funcType = maybeParameterizeOnCtxType( funcType );
}
IFunctionType boundFuncType = boundFunctionType( funcType );
e.setType( boundFuncType.getReturnType() );
e.setFunctionType( boundFuncType );
}
e.setArgs( null );
}
verify( e, !isRecursiveConstructorCall, Res.MSG_RECURSIVE_CONSTRUCTOR );
if( isParsingAbstractConstructor() &&
e.getFunctionSymbol() instanceof DynamicFunctionSymbol &&
e.getFunctionSymbol().isAbstract() )
{
e.addParseException( new ParseException( state, Res.MSG_NO_ABSTRACT_METHOD_CALL_IN_CONSTR, e.getFunctionSymbol().getDisplayName() ) );
}
if( e != null )
{
verifyNotCallingOverridableFunctionFromCtor( e );
if( getGosuClass() != null && getGosuClass().isAnonymous() && getGosuClass().getEnclosingType() instanceof IGosuEnhancement )
{
verify( e, false, Res.MSG_CANNOT_REFERENCE_ENCLOSING_METHODS_WITHIN_ENHANCEMENTS );
}
verifyReifiedCallHasProperContext( e );
}
pushExpression( e );
}
private void verifyReifiedCallHasProperContext( MethodCallExpression e )
{
IFunctionType funcType = e.getFunctionType();
if( funcType == null || !Modifier.isReified( funcType.getModifiers() ) )
{
return;
}
verifyTypeVarAreReified( e, e.getFunctionType() );
}
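// Fallback that lets an unresolved call such as getFoo()/isFoo()/setFoo(x) parse as access to the
// corresponding Foo property when the property is backed by Java-style accessor methods; the call
// is then represented as a PropertyAsMethodCallIdentifier.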
private boolean supportPropertyAccessAsGetterCall( String[] t, String strFunction, IFunctionSymbol function )
{
// Note we only support fake isXxx(), getXxx(), and setXxx() access to a property if the property
// is implemented in source code by is/get/set methods. In other words we only support, for example,
// explicit properties that distinguish method calls from property access; properties defined in Gosu code do not apply here.
List<IFunctionType> listFunctionTypes;
if( (function == null || !(function.getType() instanceof IFunctionType)) )
{
listFunctionTypes = new ArrayList<>();
addJavaPropertyMethods( strFunction, getGosuClass(), listFunctionTypes );
if( listFunctionTypes.stream().anyMatch( ft -> ft.getDisplayName().equals( strFunction ) ) )
{
String strPropertyName = getPropertyNameFromMethodName( strFunction );
boolean bPossiblePropertyName = getGosuClass() != null && strPropertyName != null;
if( bPossiblePropertyName )
{
t[0] = strPropertyName;
parseIdentifier( new PropertyAsMethodCallIdentifier( strFunction ), t );
Expression expression = peekExpression();
if( !expression.hasParseExceptions() )
{
return true;
}
popExpression();
}
}
}
return false;
}
private ISymbol maybeCaptureSymbol( MethodCallExpression e, ISymbol functionSymbol )
{
if( isParsingBlock() || isOrIsEnclosedByAnonymousClass( getGosuClass() ) && !getOwner().isParsingAnnotation() )
{
ISymbol capturedSym = captureSymbol( getCurrentEnclosingGosuClass(), functionSymbol.getName(), e );
if( capturedSym != null )
{
functionSymbol = capturedSym;
}
}
return functionSymbol;
}
private boolean isInSeparateStringTemplateExpression()
{
// If parsing a template or string template, look to see if we are parsing tokens within a
// separate template expression. We want to avoid fusing two expressions together
// eg. "${x} ${(x)}" will otherwise try to parse the tokens of the first expr along with the
// tokens of the second as a bad method call instead of just stopping after the first.
// We can determine if we've crossed an expression boundary by examining the prior token and
// checking for the '}' terminal in the whitespace (non-source code content is considered
// whitespace while parsing a template).
if( getTokenizerInstructor() != null )
{
Token priorToken = getTokenizer().getTokenAt( getTokenizer().getState() - 1 );
return priorToken != null &&
priorToken.getType() == ISourceCodeTokenizer.TT_WHITESPACE &&
(priorToken.getStringValue().indexOf( '}' ) >= 0 ||
priorToken.getStringValue().indexOf( '>' ) >= 0);
}
return false;
}
private ISymbol possiblyResolveFunctionSymbol( MethodCallExpression e, String strFunction )
{
ISymbol functionSymbol = _symTable.getSymbol( strFunction );
if( functionSymbol == null )
{
// Try to find a captured closure
functionSymbol = resolveSymbol( e, strFunction, false );
//noinspection ThrowableResultOfMethodCallIgnored
e.removeParseException( Res.MSG_BAD_IDENTIFIER_NAME );
if( !(functionSymbol instanceof CapturedSymbol) )
{
functionSymbol = null;
}
}
return functionSymbol;
}
private boolean isBlockSym( ISymbol functionSymbol )
{
return functionSymbol != null && functionSymbol.getType() instanceof IBlockType;
}
private boolean staticRefToNonStaticFunc( String stFunction, Expression[] eArgs )
{
if( isParsingStaticFeature() && getGosuClass() != null )
{
List<? extends IDynamicFunctionSymbol> matchingDFSs = getGosuClass().getMemberFunctions( stFunction );
for( IDynamicFunctionSymbol dfs : matchingDFSs )
{
if( !dfs.isStatic() && dfs.getArgs().size() == eArgs.length )
{
return true;
}
}
}
return false;
}
private boolean staticRefToNonStaticProp( String name )
{
if( isParsingStaticFeature() && getGosuClass() != null )
{
IDynamicPropertySymbol dps = getGosuClass().getMemberProperty( name );
if( dps != null && !dps.isStatic() )
{
return true;
}
}
return false;
}
private IFunctionType boundFunctionType( IFunctionType funcType )
{
if( funcType.isGenericType() )
{
IGenericTypeVariable[] typeVariables = funcType.getGenericTypeVariables();
List<IType> functionTypeVars = new ArrayList<>();
addTypeVarsToList( functionTypeVars, typeVariables );
return (IFunctionType)TypeLord.boundTypes( funcType, functionTypeVars );
}
else
{
return funcType;
}
}
private IDynamicFunctionSymbol getDFSForFunctionType( String strFunction, MethodScore bestMethod )
{
List<IFunctionSymbol> list = getDfsDeclsForFunction( strFunction );
IInvocableType rawFunctionType = bestMethod.getRawFunctionType();
for( IFunctionSymbol dfs : list )
{
IType dfsType = dfs.getType();
if( dfsType.equals( rawFunctionType ) || dfsType.equals( rawFunctionType.getGenericType() ) )
{
return (IDynamicFunctionSymbol) dfs;
}
}
throw new IllegalStateException( "Could not find matching DFS in " + list + " for type " + rawFunctionType );
}
private void verifyNotCallingOverridableFunctionFromCtor( MethodCallExpression mce )
{
if( !isParsingConstructor() || isParsingBlock() )
{
return;
}
IFunctionSymbol fs = mce.getFunctionSymbol();
if( fs instanceof DynamicFunctionSymbol &&
!(fs instanceof SuperConstructorFunctionSymbol) &&
!(fs instanceof ThisConstructorFunctionSymbol) )
{
DynamicFunctionSymbol dfs = (DynamicFunctionSymbol)fs;
verifyOrWarn( mce, dfs.isPrivate() || dfs.isFinal() || dfs.isStatic(), true, Res.MSG_CALLING_OVERRIDABLE_FROM_CTOR, dfs.getName() );
}
}
private void verifyNotCallingOverridableFunctionFromCtor( BeanMethodCallExpression mce )
{
if( !isParsingConstructor() || isParsingBlock() )
{
return;
}
if( mce.getRootExpression() instanceof Identifier &&
((Identifier)mce.getRootExpression()).getSymbol() != null &&
Keyword.KW_this.equals( ((Identifier)mce.getRootExpression()).getSymbol().getName() ) )
{
IMethodInfo mi = mce.getMethodDescriptor();
if( mi instanceof AbstractGenericMethodInfo )
{
ReducedDynamicFunctionSymbol dfs = ((AbstractGenericMethodInfo)mi).getDfs();
if (!((AbstractGenericMethodInfo)mi).getDfs().isSuperOrThisConstructor())
{
verifyOrWarn( mce, dfs.isPrivate() || dfs.isFinal() || dfs.isStatic(), true, Res.MSG_CALLING_OVERRIDABLE_FROM_CTOR, dfs.getName() );
}
}
}
}
private boolean isParsingConstructor()
{
if( !isParsingFunction() )
{
return false;
}
FunctionType funcType = peekParsingFunction();
return funcType.getReturnType() == getGosuClass() &&
funcType.getName().equals( funcType.getReturnType().getRelativeName() );
}
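// Explicit call-site parameterization of a generic function, e.g. (illustrative Gosu): foo<String>( x ).
// The '<' is only consumed as a type-argument list when at least one candidate function with this
// name is generic; otherwise the token is left in the stream for later parsing.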
private IType[] parsePossibleFunctionParameterization( String name, MethodCallExpression e )
{
if( match( null, "<", SourceCodeTokenizer.TT_OPERATOR, true ) )
{
List<IFunctionType> listFunctionTypes = getFunctionTypesForName( name );
for( IFunctionType ftype : listFunctionTypes )
{
if( ftype.isGenericType() )
{
return parseFunctionParameterization( e );
}
}
}
return null;
}
private void parseIdentifier( String[] T )
{
parseIdentifier( new Identifier(), T );
}
private void parseIdentifier( Identifier e, String[] T )
{
// Identifier
String name = T[0];
ISymbol s = resolveSymbol( e, name, true );
if( s instanceof DynamicFunctionSymbol )
{
// No function symbols allowed as identifiers; use a block for that
s = resolveNamespaceSymbol( e, name );
}
else if( s instanceof DynamicPropertySymbol )
{
//wrap in a PropertyAccessIdentifier to distinguish between identifiers that
//invoke code versus those that do not
e = new PropertyAccessIdentifier( e );
}
IType type = s.getType();
e.setSymbol( s, _symTable );
e.setType( type );
if( s instanceof DynamicPropertySymbol )
{
if( getGosuClass() != null && !(getGosuClass() instanceof IGosuProgram) && getGosuClass().isAnonymous() && getGosuClass().getEnclosingType() instanceof IGosuEnhancement )
{
if( s.getName().equals( Keyword.KW_outer.toString() ) )
{
verify( e, false, Res.MSG_CANNOT_REFERENCE_OUTER_SYMBOL_WITHIN_ENHANCEMENTS );
}
else
{
verify( e, false, Res.MSG_CANNOT_REFERENCE_ENCLOSING_PROPERTIES_WITHIN_ENHANCEMENTS );
}
}
else if( getGosuClass() != null &&
getGosuClass().getEnclosingType() != null &&
getGosuClass().getEnclosingType().isInterface() &&
s.getName().equals( Keyword.KW_outer.toString() ) )
{
verify( e, false, Res.MSG_BAD_IDENTIFIER_NAME, name );
}
}
verify( e, !(s instanceof AmbiguousSymbol), Res.MSG_AMBIGUOUS_SYMBOL_REFERENCE, name );
pushExpression( e );
verify( e, !(getCurrentEnclosingGosuClass() instanceof IBlockClass) ||
!Keyword.KW_super.equals( s.getName() ), Res.MSG_SUPER_NOT_ACCESSIBLE_FROM_BLOCK );
if( !(type instanceof ErrorType) && !(type instanceof IFunctionType) )
{
// We resolved a symbol; make sure its case is correct
verifyCase( e, name, s.getName(), Res.MSG_VAR_CASE_MISMATCH, false );
}
if( e instanceof PropertyAccessIdentifier )
{
verifyReifiedCallHasProperContext( (PropertyAccessIdentifier)e, (DynamicPropertySymbol)s );
}
}
private void verifyReifiedCallHasProperContext( PropertyAccessIdentifier e, DynamicPropertySymbol dps )
{
IType type = dps.getType();
if( type instanceof IErrorType )
{
return;
}
IFunctionType funcType = (IFunctionType)(dps.isReadable()
? dps.getGetterDfs().getType()
: dps.getSetterDfs().getType());
if( funcType == null || !Modifier.isReified( funcType.getModifiers() ) )
{
return;
}
verifyTypeVarAreReified( e, funcType );
}
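// Parses an identifier and, if that yields an error type (or a bare, non-block function type),
// backtracks and tries the alternatives that share the same start symbol: first an unqualified enum
// constant inferred from the context type, then a type literal. Illustrative Gosu sketch (assumed
// declarations):
//   var day : DayOfWeek = MONDAY   // MONDAY resolves via the context type rather than a local symbol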
private void parseIdentifierOrTypeLiteralOrEnumConstant( String[] T, int iOffset, int iLineNum, int iColumn )
{
// Identifier
String name = T[0];
parseIdentifier( T );
Expression identifier = peekExpression();
IType type = identifier.getType();
if( type instanceof ErrorType || (type instanceof IFunctionType && !(type instanceof IBlockType)))
{
// Couldn't parse an identifier; let's try parsing a static member-access or a type literal...
// Note we must parse a type literal here instead of (or in addition to)
// parseLiteral() because Identifiers and TypeLiterals have the same
// start symbol.
// Set the errant expression's location so that its subordinate expressions are not
// included in the expression we try next...
setLocation( iOffset, iLineNum, iColumn );
getLocationsList().remove( peekExpression().getLocation() );
Expression errantExpression = popExpression();
// See if it can be parsed as an inferred enum expression
MemberAccess enumConstExpr = parseUnqualifiedEnumConstant( T[0] );
if( enumConstExpr != null )
{
pushExpression( enumConstExpr );
enumConstExpr.setStartOffset( iOffset );
}
else
{
// Try parsing a type literal
tryToMakeTypeLiteral( T, iOffset, iLineNum, iColumn, name, errantExpression );
}
}
}
private void tryToMakeTypeLiteral( String[] T, int iOffset, int iLineNum, int iColumn, String name, Expression errantExpression )
{
TypeLiteral tl = resolveTypeLiteral(T);
boolean bArrayOrParameterized = resolveArrayOrParameterizationPartOfTypeLiteral( T, false, tl );
if( !bArrayOrParameterized && ((TypeLiteral)peekExpression()).getType().getType() instanceof ErrorType )
{
popExpression();
// All has failed.
// Try to determine the most suitable errant expression and push that
if( staticRefToNonStaticProp( name ) )
{
errantExpression.clearParseExceptions();
verify( errantExpression, false, Res.MSG_CANNOT_REFERENCE_NON_STATIC_PROPERTY_FROM_STATIC_CONTEXT );
}
pushExpression( errantExpression );
}
else
{
if( match( null, "&", SourceCodeTokenizer.TT_OPERATOR, true ) )
{
setLocation( iOffset, iLineNum, iColumn ); // set location of initial type literal (tl)
parseAggregateTypeLiteral(false);
}
TypeLiteral typeLiteral = (TypeLiteral)peekExpression();
verifyTypeVarAreReified( typeLiteral, typeLiteral.getType().getType() );
}
}
private void parseNamespaceStartOrRelativeType( String[] T, boolean bInterface )
{
IType type = resolveNamespace( T[0] );
if( type != null )
{
Identifier e = new Identifier();
ISymbol s = new Symbol( T[0], type, null );
e.setSymbol( s, _symTable );
e.setType( type );
pushExpression( e );
}
else
{
TypeLiteral tl = resolveTypeLiteral( T, true, bInterface );
resolveArrayOrParameterizationPartOfTypeLiteral( T, true, tl );
}
}
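// Resolves a bare constant name against the context type when that type is an enum (or assignable
// to IEnumValue), producing a MemberAccess on the enum's static property. Returns null if the name
// is not a static property of the context type.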
private MemberAccess parseUnqualifiedEnumConstant( String strConstValue )
{
IType contextType = getContextType().getType();
if( contextType != null )
{
if( contextType.isEnum() || TypeSystem.get( IEnumValue.class ).isAssignableFrom( contextType ) )
{
try
{
IPropertyInfo property = contextType.getTypeInfo().getProperty( strConstValue );
if( property != null && property.isStatic() )
{
MemberAccess ma = new UnqualifiedEnumMemberAccess();
TypeLiteral e = new TypeLiteral( MetaType.getLiteral( property.getOwnersType() ) );
ma.setRootExpression( e );
ma.setMemberName( property.getName() );
ma.setType( property.getFeatureType() );
ma.setMemberAccessKind( MemberAccessKind.NORMAL );
return ma;
}
}
catch( IllegalArgumentException iae )
{
// skip
}
}
}
return null;
}
private void verifyArgCount( ParsedElement element, int iArgs, IFunctionType funcType )
{
int expectedArgs = funcType.getParameterTypes().length;
if( iArgs != expectedArgs )
{
ParseException pe = new WrongNumberOfArgsException( makeFullParserState(), Res.MSG_WRONG_NUMBER_OF_ARGS_TO_FUNCTION, funcType.getParamSignature(), expectedArgs, iArgs );
pe.setParamTypesExpected( funcType.getParameterTypes() );
pe.setCausedByArgumentList( true );
element.addParseException( pe );
}
}
private void verifyOverrideNotOnMethodThatDoesNotExtend( ParsedElement element, DynamicFunctionSymbol dfs )
{
if( !dfs.isOverride() )
{
return;
}
ParseException pe = null;
ICompilableType gsClass = getGosuClass();
if( gsClass != null )
{
String strPotentialProperty = getPropertyNameFromMethodNameIncludingSetter( dfs.getDisplayName() );
if( strPotentialProperty != null )
{
pe = new DoesNotOverrideFunctionException( makeFullParserState(), Res.MSG_FUNCTION_NOT_OVERRIDE_PROPERTY, dfs.getName(), strPotentialProperty );
}
}
if( pe == null )
{
pe = new DoesNotOverrideFunctionException( makeFullParserState(), Res.MSG_FUNCTION_NOT_OVERRIDE, dfs.getName() );
}
element.addParseException( pe );
}
private void verifyArgCount( ParsedElement element, int iArgs, IConstructorType ctorType )
{
if( ctorType.getConstructor() instanceof DynamicConstructorInfo )
{
return;
}
int expectedArgs = ctorType.getParameterTypes().length;
if( iArgs != expectedArgs )
{
ParseException pe = new ParseException( makeFullParserState(), Res.MSG_WRONG_NUMBER_OF_ARGS_TO_CONSTRUCTOR, ctorType.getArgSignature(), expectedArgs, iArgs );
pe.setParamTypesExpected( ctorType.getParameterTypes() );
pe.setCausedByArgumentList( true );
element.addParseException( pe );
}
}
private void parsePlainFunction( IFunctionSymbol functionSymbol )
{
MethodCallExpression e = new MethodCallExpression();
e.setFunctionSymbol( functionSymbol );
IFunctionType funcType = (IFunctionType)functionSymbol.getType();
e.setType( funcType.getReturnType() );
IType[] argTypes = funcType.getParameterTypes();
boolean bNoArgsProvided;
if( !(bNoArgsProvided = match( null, ')' )) || funcType.hasOptionalParams() )
{
verify( e, argTypes != null && argTypes.length > 0, Res.MSG_NO_ARGUMENTS, functionSymbol.getName() );
MethodScore score = parseArgumentList( getGosuClass(), e, Collections.singletonList( funcType ), null, true, bNoArgsProvided );
if( score.isValid() )
{
List<IExpression> scoreArgs = score.getArguments();
verifyArgCount( e, scoreArgs.size(), funcType );
//noinspection SuspiciousToArrayCall
e.setArgs( scoreArgs.toArray( new Expression[scoreArgs.size()] ) );
e.setNamedArgOrder( score.getNamedArgOrder() );
}
verify( e, bNoArgsProvided || match( null, ')' ), Res.MSG_EXPECTING_FUNCTION_CLOSE );
}
else
{
verify( e, argTypes == null || argTypes.length == 0, Res.MSG_EXPECTING_ARGS, functionSymbol.getName() );
e.setArgs( null );
}
pushExpression( e );
}
private void parseDynamicFunction( Symbol dynamicSymbol )
{
MethodCallExpression e = new MethodCallExpression();
e.setFunctionSymbol( dynamicSymbol );
e.setType( dynamicSymbol.getType() );
if( !match( null, ')' ) )
{
MethodScore score = parseArgumentList( getGosuClass(), e, Collections.singletonList(
new FunctionType( dynamicSymbol.getName(), dynamicSymbol.getType(), IType.EMPTY_ARRAY ) ), null, true, false );
List<IExpression> scoreArgs = score.getArguments();
//noinspection SuspiciousToArrayCall
e.setArgs( scoreArgs.toArray( new Expression[scoreArgs.size()] ) );
e.setNamedArgOrder( score.getNamedArgOrder() );
verify( e, match( null, ')' ), Res.MSG_EXPECTING_FUNCTION_CLOSE );
}
else
{
e.setArgs( null );
}
pushExpression( e );
}
private void parseMemberAccess( Expression rootExpression, MemberAccessKind kind, int iTokenStart, String strMemberName, LazyLightweightParserState state, boolean bParseTypeLiteralOnly )
{
parseMemberAccess( rootExpression, kind, iTokenStart, strMemberName, state, bParseTypeLiteralOnly, false );
}
private void parseMemberAccess( Expression rootExpression, MemberAccessKind kind, final int iTokenStart, final String strMemberName, LazyLightweightParserState state, boolean bParseTypeLiteralOnly, boolean createSynthesizedProperty )
{
BeanMethodCallExpression e = new BeanMethodCallExpression();
IType rootType = rootExpression.getType();
rootType = IGosuClass.ProxyUtil.isProxy( rootType ) && rootType instanceof IGosuClass ? ((IGosuClass) rootType).getJavaType() : rootType;
boolean bExpansion = kind == MemberAccessKind.EXPANSION;
rootType = bExpansion ? TypeLord.getExpandableComponentType( rootType ) : rootType;
if( rootType != null && !rootType.isArray() )
{
boolean bAcceptableType =
BeanAccess.isBeanType( rootType ) ||
rootType == GosuParserTypes.BOOLEAN_TYPE() ||
rootType == GosuParserTypes.STRING_TYPE() ||
rootType == GosuParserTypes.NUMBER_TYPE() ||
rootType instanceof IBlockType ||
rootType instanceof MetaType;
verify( e, bAcceptableType, Res.MSG_EXPECTING_BEANTYPE, rootType.getName() );
}
IType[] typeParameters = null;
try
{
if( !bParseTypeLiteralOnly && !(rootType instanceof ErrorType) && match( null, "<", SourceCodeTokenizer.TT_OPERATOR, true ) )
{
List<IFunctionType> list = new ArrayList<>();
// if any function with the specified name is generic, parse parameterization
getFunctionType( rootType, strMemberName, null, list, this, true );
for( IFunctionType ftype : list )
{
if( ftype.isGenericType() )
{
typeParameters = parseFunctionParameterization( e );
break;
}
}
}
}
catch( ParseException pe )
{
e.addParseException( pe );
}
int iParenStart = _tokenizer.getTokenStart();
int mark = _tokenizer.mark();
if( !bParseTypeLiteralOnly && !isInSeparateStringTemplateExpression() && match( null, null, '(', true ) && !isBlockInvoke( rootExpression, strMemberName, rootType ) )
{
// Method call
match( null, '(' );
parseMethodMember( rootExpression, kind, iTokenStart, strMemberName, state, bParseTypeLiteralOnly, createSynthesizedProperty, e, rootType, bExpansion, typeParameters, iParenStart, mark);
}
else
{
// Property access
parsePropertyMember( rootExpression, kind, iTokenStart, strMemberName, state, bParseTypeLiteralOnly, createSynthesizedProperty, rootType, bExpansion );
}
}
private void parseMethodMember( Expression rootExpression, MemberAccessKind kind, int iTokenStart, String strMemberName, LazyLightweightParserState state, boolean bParseTypeLiteralOnly, boolean createSynthesizedProperty, BeanMethodCallExpression e, IType rootType, boolean bExpansion, IType[] typeParameters, int iParenStart, int mark )
{
int iLocationsCount = _locations.size();
parseMethodMember( rootExpression, kind, iTokenStart, strMemberName, state, bParseTypeLiteralOnly, e, rootType, bExpansion, typeParameters, iParenStart );
Expression expr = peekExpression();
if( expr.hasParseExceptions() )
{
maybeOpenParenIsForParenthesizedExpr( rootExpression, kind, iTokenStart, strMemberName, state, bParseTypeLiteralOnly, createSynthesizedProperty, e, rootType, bExpansion, typeParameters, iParenStart, mark, iLocationsCount );
}
}
private void maybeOpenParenIsForParenthesizedExpr( Expression rootExpression, MemberAccessKind kind, int iTokenStart, String strMemberName, LazyLightweightParserState state, boolean bParseTypeLiteralOnly, boolean createSynthesizedProperty, BeanMethodCallExpression e, IType rootType, boolean bExpansion, IType[] typeParameters, int iParenStart, int mark, int iLocationsCount )
{
if( !isOpenParenOnNextLine( mark ) )
{
return;
}
backtrack( mark, iLocationsCount );
parsePropertyMember( rootExpression, kind, iTokenStart, strMemberName, state, bParseTypeLiteralOnly, createSynthesizedProperty, rootType, bExpansion );
Expression expr = peekExpression();
if( expr.hasParseExceptions() )
{
// Failed to parse as Property, reparse as Method call
_tokenizer.restoreToMark( mark );
expr = popExpression();
removeInnerClasses( expr );
removeLocationsFrom( iLocationsCount );
parseMethodMember( rootExpression, kind, iTokenStart, strMemberName, state, bParseTypeLiteralOnly, e, rootType, bExpansion, typeParameters, iParenStart );
}
}
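// Used by maybeOpenParenIsForParenthesizedExpr above: a '(' preceded by whitespace that contains a
// newline is treated as likely starting a separate parenthesized expression rather than an argument
// list, so the member is first re-parsed as a property access. Illustrative sketch (assumed):
//   var x = foo.Bar
//   (someValue).doSomething()   // the '(' belongs to the next statement, not to a call foo.Bar(...)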
private boolean isOpenParenOnNextLine( int mark )
{
if( mark <= 0 )
{
return false;
}
Token priorMarkToken = _tokenizer.getTokenAt( mark - 1 );
return priorMarkToken != null && priorMarkToken.getType() == ISourceCodeTokenizer.TT_WHITESPACE && priorMarkToken.getText().indexOf( '\n' ) >= 0;
}
private void parseMethodMember( Expression rootExpression, MemberAccessKind kind, int iTokenStart, String strMemberName, LazyLightweightParserState state, boolean bParseTypeLiteralOnly, BeanMethodCallExpression e, IType rootType, boolean bExpansion, IType[] typeParameters, int iParenStart )
{
e.setArgPosition( iParenStart + 1 );
e.setRootExpression( rootExpression );
IMethodInfo md = null;
List<IFunctionType> listFunctionTypes = getPreliminaryFunctionTypes( strMemberName, e, rootType, typeParameters );
boolean bNoArgsProvided;
if( !(bNoArgsProvided = match( null, ')' )) ||
(listFunctionTypes.size() == 1 && listFunctionTypes.get( 0 ).hasOptionalParams()) )
{
MethodScore methodScore = parseArgumentList( rootType, e, listFunctionTypes, typeParameters, !(rootType instanceof ErrorType), bNoArgsProvided );
//noinspection SuspiciousToArrayCall
Expression[] eArgs = methodScore.getArguments().toArray( new Expression[methodScore.getArguments().size()] );
e.setArgs( eArgs );
if( methodScore.isValid() )
{
IFunctionType funcType = (IFunctionType)methodScore.getInferredFunctionType();
verifyArgCount( e, eArgs.length, funcType );
assert funcType != null;
e.setType( (!bExpansion || funcType.getReturnType().isArray() || funcType.getReturnType() == JavaTypes.pVOID())
? funcType.getReturnType()
: funcType.getReturnType().getArrayType() );
e.setFunctionType( funcType );
IType[] argTypes = funcType.getParameterTypes();
e.setArgTypes( argTypes );
IFunctionType rawFunctionType = (IFunctionType)methodScore.getRawFunctionType();
md = rawFunctionType.getMethodInfo();
if( !md.isVisible( getVisibilityConstraint() ) )
{
// let property assignment as setter call sneak through, otherwise error
verify( e, getPropertyNameFromMethodNameIncludingSetter( strMemberName ) != null, Res.MSG_METHOD_NOT_VISIBLE, strMemberName );
}
//verify( md == null || !bMetaType || accessList.size() > 1 || md.isStatic(), strFunction + " cannot call non-static methods here." );
e.setMethodDescriptor( bExpansion ? new ArrayExpansionMethodInfo( md ) : md );
e.setMemberAccessKind( kind );
e.setNamedArgOrder( methodScore.getNamedArgOrder() );
verifyCase( e, strMemberName, funcType.getName(), state, Res.MSG_FUNCTION_CASE_MISMATCH, false );
verifySuperAccess( rootExpression, e, funcType.getMethodInfo(), strMemberName );
}
else
{
if( !(rootType instanceof ErrorType) && !e.hasParseException( Res.MSG_AMBIGUOUS_METHOD_INVOCATION ) )
{
verify( e, false, state, Res.MSG_NO_SUCH_FUNCTION, strMemberName );
}
e.setType( ErrorType.getInstance() );
}
e.setAccessPath( strMemberName );
verify( e, bNoArgsProvided || match( null, ')' ), Res.MSG_EXPECTING_FUNCTION_CLOSE );
}
else
{
//No parameters were found
if( rootType instanceof ErrorType )
{
e.setType( ErrorType.getInstance() );
e.setArgTypes( IType.EMPTY_ARRAY );
e.setAccessPath( strMemberName );
md = null;
e.setMethodDescriptor( md );
e.setArgs( null );
}
else
{
IFunctionType funcType;
ParseException parseException = null;
try
{
funcType = getFunctionType( rootType, strMemberName, new Expression[0], null, this, true );
}
catch( ParseException pe )
{
funcType = findFunction( listFunctionTypes, bNoArgsProvided );
parseException = pe;
}
if( funcType == null )
{
if( !e.hasParseException( parseException.getMessageKey() ) )
{
e.addParseException( parseException );
}
e.setType( ErrorType.getInstance() );
e.setStartOffset( iTokenStart );
pushExpression( e );
return;
}
if( typeParameters != null )
{
if( verifyCanParameterizeType( e, funcType, typeParameters ) )
{
IFunctionType parameterizedFuncType = (IFunctionType)funcType.getParameterizedType( typeParameters );
if( verify( e, parameterizedFuncType != null, Res.MSG_COULD_NOT_PARAMETERIZE ) )
{
funcType = parameterizedFuncType;
}
}
}
if( isEndOfExpression() )
{
funcType = maybeParameterizeOnCtxType( funcType );
}
IFunctionType boundType = boundFunctionType( funcType );
e.setType( (!bExpansion || funcType.getReturnType().isArray() || funcType.getReturnType() == JavaTypes.pVOID())
? boundType.getReturnType()
: boundType.getReturnType().getArrayType() );
e.setFunctionType( funcType );
IType[] argTypes = funcType.getParameterTypes();
e.setArgTypes( argTypes );
e.setAccessPath( strMemberName );
md = funcType.getMethodInfo();
verify( e, md.isVisible( getVisibilityConstraint() ) || getPropertyNameFromMethodName( strMemberName ) != null, Res.MSG_METHOD_NOT_VISIBLE, strMemberName );
verify( e, !md.isAbstract() ||
!(rootExpression instanceof Identifier) ||
!((Identifier)rootExpression).getSymbol().getName().equals( Keyword.KW_super.toString() ) ||
GosuClass.isObjectMethod( md ),
Res.MSG_ABSTRACT_METHOD_CANNOT_BE_ACCESSED_DIRECTLY, strMemberName );
//verify( md == null || !bMetaType || accessList.size() > 1 || md.isStatic(), strFunction + " cannot call non-static methods here." );
e.setMethodDescriptor( bExpansion ? new ArrayExpansionMethodInfo( md ) : md );
e.setMemberAccessKind( kind );
verifyCase( e, strMemberName, md.getDisplayName(), state, Res.MSG_FUNCTION_CASE_MISMATCH, false );
verifyArgCount( e, 0, funcType );
verifySuperAccess( rootExpression, e, funcType.getMethodInfo(), strMemberName );
e.setArgs( null );
}
}
e.setStartOffset( iTokenStart );
if( md != null && md.isDeprecated() )
{
// Add a warning if the method is deprecated
e.addParseWarning(
new ParseWarningForDeprecatedMember( state.cloneWithNewTokenStartAndTokenEnd( iTokenStart, md.getDisplayName().length() ),
TypeInfoUtil.getMethodSignature( md ), e.getRootType().getName() ) );
}
if( isParsingAbstractConstructor() )
{
handleAbstractCtor( iTokenStart, strMemberName, e, state );
}
verifyNotCallingOverridableFunctionFromCtor( e );
verifyReifiedCallHasProperContext( e );
pushExpression( e );
}
private IFunctionType findFunction( List<IFunctionType> listFunctionTypes, boolean bNoArgsProvided )
{
if( listFunctionTypes.isEmpty() )
{
return null;
}
for( IFunctionType funcType: listFunctionTypes )
{
if( (funcType.getParameterTypes().length == 0) == bNoArgsProvided )
{
return funcType;
}
}
return listFunctionTypes.get( 0 );
}
private void verifyReifiedCallHasProperContext( BeanMethodCallExpression e )
{
IType rootType = e.getRootType();
if( rootType instanceof IMetaType )
{
verifyTypeVarAreReified( e.getRootExpression(), ((IMetaType)rootType).getType() );
}
IFunctionType funcType = e.getFunctionType();
if( funcType == null || !Modifier.isReified( funcType.getModifiers() ) )
{
return;
}
verifyTypeVarAreReified( e, funcType );
if( !Modifier.isStatic( funcType.getModifiers() ) &&
funcType.getEnclosingType() instanceof IGosuEnhancement &&
TypeLord.getPureGenericType( funcType.getEnclosingType() ).isGenericType() &&
!e.hasParseExceptions() )
{
verifyTypeVarAreReified( e.getRootExpression(), e.getRootType() );
}
}
private void parsePropertyMember( Expression rootExpression, MemberAccessKind kind, int iTokenStart, String strMemberName, LazyLightweightParserState state, boolean bParseTypeLiteralOnly, boolean createSynthesizedProperty, IType rootType, boolean bExpansion )
{
IPropertyInfo pi = null;
MemberAccess ma = bExpansion
? new MemberExpansionAccess()
: createSynthesizedProperty
? new SynthesizedMemberAccess()
: new MemberAccess();
ma.setRootExpression( rootExpression );
ma.setMemberAccessKind(kind);
IType memberType = null;
try
{
if( rootType instanceof IFunctionType && rootExpression instanceof Identifier )
{
// Can't deref functions, convert to namespace if possible (this is to handle a bad
// relative namespace problem e.g., print.foo.SomeTime where print is a namespace in gw.api.print ..yeeeaaah)
Identifier identifier = (Identifier)rootExpression;
INamespaceType namespaceType = resolveNamespace( identifier.getSymbol().getName() );
if( namespaceType != null )
{
Symbol sym = new Symbol( namespaceType.getRelativeName(), namespaceType, null );
identifier.setSymbol( sym, getSymbolTable() );
identifier.setType( namespaceType );
rootType = namespaceType;
}
}
if( rootType instanceof INamespaceType )
{
if( !strMemberName.equals( "*" ) )
{
String strType = rootType.getName() + '.' + strMemberName;
// First, try a sub-namespace
memberType = TypeSystem.getNamespace( strType );
if( memberType == null )
{
// Now try a fq type name
memberType = resolveTypeName( strType, true );
if( memberType == null )
{
memberType = resolveNamespace( strType );
}
else if( memberType != null )
{
String[] T = {strType};
TypeLiteral tl = resolveTypeLiteral( T );
resolveArrayOrParameterizationPartOfTypeLiteral( T, bParseTypeLiteralOnly, tl );
tl.setPackageExpression( rootExpression );
return;
}
}
}
}
else if( rootExpression instanceof TypeLiteral )
{
// Try an inner class name
IType typeLiteralType = ((MetaType)rootType).getType();
if( typeLiteralType instanceof IHasInnerClass )
{
memberType = getInnerClass( strMemberName, memberType, (IHasInnerClass)typeLiteralType );
if( memberType != null )
{
if( !shouldParseMemberInstead( strMemberName, rootType, memberType ) )
{
String[] T = new String[1];
T[0] = memberType.getName();
TypeLiteral tl = resolveTypeLiteral( T );
resolveArrayOrParameterizationPartOfTypeLiteral( T, bParseTypeLiteralOnly, tl );
verifyTypeAccessible( tl, memberType );
tl.setPackageExpression( rootExpression );
return;
}
}
}
else if( typeLiteralType instanceof ErrorType )
{
memberType = ErrorType.getInstance();
}
}
if( memberType == null )
{
if( bParseTypeLiteralOnly && !(rootType instanceof ErrorType) )
{
if( rootType instanceof INamespaceType )
{
verify( ma, false, Res.MSG_NO_TYPE_ON_NAMESPACE, strMemberName, rootType == null ? "<no type specified>" : rootType.getName() );
}
else
{
IType errRootType = rootType instanceof IMetaType ? ((IMetaType)rootType).getType() : rootType;
addError( ma, Res.MSG_INVALID_INNER_TYPE, strMemberName, TypeLord.getPureGenericType( errRootType ).getRelativeName() );
}
memberType = ErrorType.getInstance();
}
else
{
pi = BeanAccess.getPropertyInfo( rootType, strMemberName, null, this, getVisibilityConstraint() );
memberType = bExpansion ? new ArrayExpansionPropertyInfo( pi ).getFeatureType() : pi.getFeatureType();
verifyCase( ma, strMemberName, pi.getName(), state, Res.MSG_PROPERTY_CASE_MISMATCH, false );
if( pi.isStatic() && !JavaTypes.ITYPE().isAssignableFrom( rootType ) )
{
IType intrinsicType = rootExpression.getType();
if( rootExpression instanceof Identifier &&
intrinsicType.getRelativeName().equals( ((Identifier)rootExpression).getSymbol().getName() ) )
{
warn( ma, false, Res.MSG_NON_STATIC_ACCESS_WITH_IDENTIFIER_OF_STATIC_MEMBER, pi.getName(), intrinsicType.getName(), ((Identifier)rootExpression).getSymbol().getName(), intrinsicType.getName() );
}
else
{
warn( ma, false, Res.MSG_NON_STATIC_ACCESS_OF_STATIC_MEMBER, pi.getName(), intrinsicType.getName() );
}
}
}
}
}
catch( ParseException e1 )
{
// memberType = ma.getRootType();
if( rootExpression instanceof Identifier && !(rootType instanceof INamespaceType) )
{
// This is to handle yet another bad relative namespace problem e.g., new activity.ActivityDetailHelper(Activity)
Identifier identifier = (Identifier)rootExpression;
INamespaceType namespaceType = resolveNamespace( identifier.getSymbol().getName().toLowerCase() );
if( namespaceType != null )
{
ISymbol oldSymbol = identifier.getSymbol();
Symbol sym = new Symbol( namespaceType.getRelativeName(), namespaceType, null );
identifier.setSymbol( sym, getSymbolTable() );
identifier.setType( namespaceType );
parseMemberAccess( rootExpression, kind, iTokenStart, strMemberName, state, bParseTypeLiteralOnly );
Expression namespaceExpr = peekExpression();
if( namespaceExpr.hasParseExceptions() )
{
((Identifier)rootExpression).setSymbol( oldSymbol, getSymbolTable() );
rootExpression.setType( oldSymbol.getType() );
}
else
{
return;
}
}
}
if( rootType instanceof INamespaceType )
{
verify( ma, false, Res.MSG_NO_TYPE_ON_NAMESPACE, strMemberName, rootType == null ? "<no type specified>" : rootType.getName() );
}
else
{
ma.addParseException( e1 );
}
memberType = ErrorType.getInstance();
}
ma.setType( memberType );
ma.setMemberName( strMemberName );
ma.setStartOffset( iTokenStart );
if( pi != null && pi.isDeprecated() )
{
// Add a warning if the property is deprecated
ma.addParseWarning( new ParseWarningForDeprecatedMember( state.cloneWithNewTokenStartAndTokenEnd( iTokenStart, pi.getName().length() ),
pi.getName(), ma.getRootType().getName() ) );
}
if( pi != null )
{
verify( ma, !pi.isAbstract() ||
!(rootExpression instanceof Identifier) ||
!((Identifier)rootExpression).getSymbol().getName().equals( Keyword.KW_super.toString() ),
Res.MSG_ABSTRACT_METHOD_CANNOT_BE_ACCESSED_DIRECTLY, strMemberName );
}
verifySuperAccess( rootExpression, ma, pi, strMemberName );
verifyReifiedCallHasProperContext( ma );
pushExpression( ma );
}
private void verifyReifiedCallHasProperContext( MemberAccess e )
{
if( e.getType() instanceof IErrorType )
{
return;
}
IType rootType = e.getRootType();
if( rootType instanceof IErrorType || rootType instanceof INamespaceType )
{
return;
}
if( rootType instanceof IMetaType )
{
verifyTypeVarAreReified( e.getRootExpression(), ((IMetaType)rootType).getType() );
}
IPropertyInfo pi = e.getPropertyInfo();
if( pi instanceof IGosuPropertyInfo )
{
IFunctionType funcType = pi.isReadable( getGosuClass() )
? (IFunctionType)((IGosuPropertyInfo)pi).getDps().getGetterDfs().getType()
: (IFunctionType)((IGosuPropertyInfo)pi).getDps().getSetterDfs().getType();
if( funcType == null || !Modifier.isReified( funcType.getModifiers() ) )
{
return;
}
verifyTypeVarAreReified( e, funcType );
if( !Modifier.isStatic( funcType.getModifiers() ) &&
funcType.getEnclosingType() instanceof IGosuEnhancement &&
TypeLord.getPureGenericType( funcType.getEnclosingType() ).isGenericType() &&
!e.hasParseExceptions() )
{
verifyTypeVarAreReified( e.getRootExpression(), e.getRootType() );
}
}
}
private void verifySuperAccess( Expression rootExpression, Expression memberExpr, IAttributedFeatureInfo feature, String strMemberName )
{
if( feature == null )
{
return;
}
verify( memberExpr, !(rootExpression instanceof SuperAccess) || !feature.isAbstract(),
Res.MSG_ABSTRACT_METHOD_CANNOT_BE_ACCESSED_DIRECTLY, strMemberName );
}
private boolean shouldParseMemberInstead( String strMemberName, IType rootType, IType memberType )
{
IType ctxType = getOwner().getContextType().getType();
if( !(ctxType instanceof IMetaType) )
{
if( memberType != null )
{
try
{
BeanAccess.getPropertyInfo( rootType, strMemberName, null, this, getVisibilityConstraint() );
// The case exists where both an inner class and a member share the same simple name.
// Favor the member parse when the context type is not a MetaType
return true;
}
catch( Exception e )
{
// eat me
}
}
}
return false;
}
private boolean isEndOfArgExpression()
{
int mark = _tokenizer.mark();
try
{
return match( null, ')' ) && isEndOfExpression();
}
finally
{
_tokenizer.restoreToMark( mark );
}
}
private boolean isParenthesisTerminalExpression()
{
return isParenthesisTerminalExpression( false );
}
private boolean isParenthesisTerminalExpression( boolean bMatchOpeningParen )
{
int mark = _tokenizer.mark();
try
{
if( bMatchOpeningParen )
{
boolean b = match(null, '(');
assert b;
}
eatBlock( '(', ')', false );
return isEndOfExpression();
}
finally
{
_tokenizer.restoreToMark( mark );
}
}
private boolean isEndOfExpression()
{
return !(match( null, null, '.', true ) ||
match( null, "?.", SourceCodeTokenizer.TT_OPERATOR, true ) ||
match( null, "*.", SourceCodeTokenizer.TT_OPERATOR, true ));
}
private IType getInnerClass( String strMemberName, IType memberType, IHasInnerClass typeLiteralType )
{
try
{
memberType = typeLiteralType.getInnerClass( strMemberName );
}
catch( IllegalStateException e1 )
{
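// Ignore: the inner class could not be resolved here; fall through and return the incoming memberType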
}
return memberType;
}
private IFunctionType maybeParameterizeOnCtxType( IFunctionType funcType )
{
if( funcType.isGenericType() )
{
//## todo: Should we do this during method scoring?
funcType = funcType.inferParameterizedTypeFromArgTypesAndContextType( IType.EMPTY_ARRAY, getContextType().getType() );
}
return funcType;
}
private List<IFunctionType> getPreliminaryFunctionTypes( String strMemberName, BeanMethodCallExpression e, IType rootType, IType[] typeParameters )
{
// Get preliminary funcTypes to check arguments. Note we do this to aid in error feedback and value popup completion.
List<IFunctionType> listFunctionTypes = new ArrayList<>( 8 );
try
{
if( !(rootType instanceof ErrorType) )
{
getFunctionType( rootType, strMemberName, null, listFunctionTypes, this, true );
addJavaPropertyMethods( strMemberName, rootType, listFunctionTypes );
}
}
catch( ParseException pe )
{
addJavaPropertyMethods( strMemberName, rootType, listFunctionTypes );
if( listFunctionTypes.isEmpty() )
{
e.addParseException( pe );
}
}
if( typeParameters != null )
{
listFunctionTypes = parameterizeFunctionTypes( e, typeParameters, listFunctionTypes );
}
return listFunctionTypes;
}
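// Maps a call named like getFoo()/setFoo(x)/isFoo() to the corresponding Java bean property's
// read/write method info (when one exists) and adds that as a candidate function type, so such
// calls can bind through the property's accessor. Illustrative case (assumed Java type):
// entity.getName() binding via the getter behind the Name property.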
private void addJavaPropertyMethods( String strMemberName, IType rootType, List<IFunctionType> listFunctionTypes )
{
String propName = getPropertyNameFromMethodNameIncludingSetter( strMemberName );
if( propName != null )
{
ITypeInfo ti = rootType.getTypeInfo();
IPropertyInfo pi = ti instanceof IRelativeTypeInfo ? ((IRelativeTypeInfo)ti).getProperty( getGosuClass(), propName ) : ti.getProperty( propName );
if( pi != null )
{
pi = maybeGetProxiedPropertyInfo( propName, pi );
if( pi instanceof IJavaPropertyInfo )
{
IMethodInfo mi;
if( strMemberName.startsWith( "set" ) )
{
mi = pi.isWritable( getGosuClass() ) ? ((IJavaPropertyInfo)pi).getWriteMethodInfo() : null;
}
else if( strMemberName.startsWith( "get" ) )
{
mi = pi.isReadable( getGosuClass() ) ? ((IJavaPropertyInfo)pi).getReadMethodInfo() : null;
}
else
{
mi = pi.isReadable( getGosuClass() ) ? ((IJavaPropertyInfo)pi).getReadMethodInfo() : null;
if( mi == null || mi.getReturnType() != JavaTypes.pBOOLEAN() && mi.getReturnType() != JavaTypes.BOOLEAN() )
{
return;
}
}
if( mi != null )
{
FunctionType functionType = new FunctionType( mi );
if( !listFunctionTypes.contains( functionType ) )
{
listFunctionTypes.add( functionType );
}
}
}
}
}
}
private IPropertyInfo maybeGetProxiedPropertyInfo( String propName, IPropertyInfo pi )
{
if( GosuClass.ProxyUtil.isProxy( pi.getOwnersType() ) )
{
ITypeInfo ti = GosuClass.ProxyUtil.getProxiedType( pi.getOwnersType() ).getTypeInfo();
pi = ti instanceof IRelativeTypeInfo ? ((IRelativeTypeInfo)ti).getProperty( getGosuClass(), propName ) : ti.getProperty( propName );
}
else if( pi instanceof IPropertyInfoDelegate )
{
pi = maybeGetProxiedPropertyInfo( propName, ((IPropertyInfoDelegate)pi).getSource() );
}
return pi;
}
private boolean isBlockInvoke( Expression rootExpression, String strMemberName, IType rootType )
{
if( rootExpression instanceof TypeLiteral )
{
// Don't look up inner classes
IType typeLiteralType = ((MetaType)rootType).getType();
if( typeLiteralType instanceof IHasInnerClass )
{
if( ((IHasInnerClass)typeLiteralType).getInnerClass( strMemberName ) != null )
{
return false;
}
}
}
if( !isErrorType( rootType ) && !(rootType instanceof INamespaceType) && !(rootType instanceof IFunctionType) )
{
IPropertyInfo pi = BeanAccess.getPropertyInfo_NoException( rootType, strMemberName, null, this, getVisibilityConstraint() );
if( pi != null )
{
return pi.getFeatureType() instanceof IBlockType;
}
}
return false;
}
private boolean isErrorType(IType rootType) {
return
rootType instanceof IErrorType ||
(rootType instanceof IMetaType && ((IMetaType) rootType).getType() instanceof IErrorType);
}
private ArrayList<IFunctionType> parameterizeFunctionTypes( Expression expression, IType[] typeParameters, List<IFunctionType> listFunctionTypes )
{
ArrayList<IFunctionType> parameterizedFunctionTypes = new ArrayList<>( 8 );
for( IFunctionType funcType : listFunctionTypes )
{
if( funcType.isGenericType() && verifyCanParameterizeType( expression, funcType, typeParameters ) )
{
IFunctionType parameterizedFuncType = (IFunctionType)funcType.getParameterizedType( typeParameters );
if( verify( expression, parameterizedFuncType != null, Res.MSG_COULD_NOT_PARAMETERIZE ) )
{
if( parameterizedFuncType != null )
{
parameterizedFunctionTypes.add( parameterizedFuncType );
}
}
}
}
return parameterizedFunctionTypes;
}
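// Adds a parse exception (Res.MSG_NO_ABSTRACT_METHOD_CALL_IN_CONSTR) when, while parsing a
// constructor of an abstract class, the call target is an abstract Gosu method declared on this
// class or one of its supertypes. Illustrative Gosu sketch (assumed syntax):
//   abstract class Base {
//     construct() { work() }   // flagged: work() is still abstract here
//     abstract function work()
//   }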
private void handleAbstractCtor( final int iTokenStart, final String strMemberName, BeanMethodCallExpression e, final IParserState state )
{
IMethodInfo mi = e.getMethodDescriptor();
if( mi instanceof GosuMethodInfo ) {
if( mi.isAbstract() && mi.getContainer().getOwnersType().isAssignableFrom( getGosuClass() ) ) {
//noinspection ThrowableInstanceNeverThrown
e.addParseException(new ParseException(new IParserState() {
public int getLineNumber() {
return state.getLineNumber();
}
public int getTokenColumn() {
return state.getTokenColumn();
}
public String getSource() {
return state.getSource();
}
public int getTokenStart() {
return iTokenStart;
}
public int getTokenEnd() {
return iTokenStart + strMemberName.length();
}
public int getLineOffset() {
return state.getLineOffset();
}
@Override
public IParserState cloneWithNewTokenStartAndTokenEnd( int newTokenStart, int newLength ) {
return null;
}
},
Res.MSG_NO_ABSTRACT_METHOD_CALL_IN_CONSTR,
mi.getDisplayName()));
}
}
}
private boolean verifyCanParameterizeType( ParsedElement elem, IType type, IType[] typeParam )
{
if( !verify( elem, type.isGenericType(), Res.MSG_CANNOT_PARAMETERIZE_NONGENERIC ) )
{
return false;
}
ICompilableTypeInternal gsClass = getGosuClass();
if( gsClass instanceof IGosuClass && !((IGosuClass)gsClass).isHeaderCompiled() )
{
return true;
}
IGenericTypeVariable[] typeVars = type.getGenericTypeVariables();
if( verify( elem, typeParam != null && typeParam.length == typeVars.length, Res.MSG_WRONG_NUM_OF_ARGS, "" ) )
{
assert typeParam != null;
boolean bRet = true;
TypeVarToTypeMap typeVarToTypeMap = new TypeVarToTypeMap();
for( int i = 0; i < typeVars.length; i++ )
{
if( typeVars[i].getTypeVariableDefinition() != null )
{
typeVarToTypeMap.put( typeVars[i].getTypeVariableDefinition().getType(), typeParam[i] );
}
}
for( int i = 0; i < typeVars.length; i++ )
{
IType boundingType = typeVars[i].getBoundingType();
boundingType = TypeLord.getActualType( boundingType, typeVarToTypeMap, true );
bRet = bRet &&
verify( elem,
// Hack to support recursive types
isTypeParamHeaderCompiling( typeParam[i] ) ||
isErrorType( typeParam[i] ) ||
(typeVars[i].getTypeVariableDefinition() != null && typeVars[i].getTypeVariableDefinition().getType().isAssignableFrom( typeParam[i] )) ||
boundingType.isAssignableFrom( typeParam[i] ) ||
boundingType instanceof IGosuClass && ((IGosuClass)boundingType).isStructure() && StandardCoercionManager.isStructurallyAssignable( TypeLord.getPureGenericType( boundingType ), typeParam[i] ),
Res.MSG_TYPE_PARAM_NOT_ASSIGNABLE_TO,
typeParam[i], boundingType );
}
return bRet;
}
return false;
}
private boolean isTypeParamHeaderCompiling( IType typeParam )
{
return (typeParam instanceof IGosuClass && ((IGosuClass)typeParam).isCompilingHeader()) ||
(typeParam instanceof TypeVariableType && ((TypeVariableType)typeParam).getBoundingType() == typeParam) ||
(typeParam instanceof TypeVariableType && isTypeParamHeaderCompiling( ((TypeVariableType)typeParam).getBoundingType() )) ||
(typeParam instanceof TypeVariableType && ((TypeVariableType)typeParam).getBoundingType() == PENDING_BOUNDING_TYPE);
}
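// Parses an explicit type-argument list on a call, e.g. (illustrative, hypothetical method):
// someObj.convert<String>( value ), returning the parsed types or null if the list is empty or absent.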
private IType[] parseFunctionParameterization( Expression e )
{
int iOffset = _tokenizer.getTokenStart();
int iLineNum = _tokenizer.getLineNumber();
int iColumn = getTokenizer().getTokenColumn();
if( match( null, "<", SourceCodeTokenizer.TT_OPERATOR ) )
{
List<TypeLiteral> paramTypes = parseTypeParameters( null );
verify( e, match( null, ">", SourceCodeTokenizer.TT_OPERATOR ), Res.MSG_EXPECTING_CLOSING_ANGLE_BRACKET_FOR_TYPE );
if( paramTypes.isEmpty() )
{
return null;
}
makeTypeParameterListClause( iOffset, iLineNum, iColumn, paramTypes );
IType[] types = new IType[paramTypes.size()];
for( int i = 0; i < paramTypes.size(); i++ )
{
TypeLiteral typeLiteral = paramTypes.get( i );
types[i] = (typeLiteral.getType()).getType();
}
return types;
}
return null;
}
private MethodScore parseArgumentList( IType rootType, ParsedElement element, List<? extends IInvocableType> listFunctionTypes,
IType[] typeParams, boolean bVerifyArgs, boolean bNoArgsProvided )
{
// Avoid *nested* method call scoring -- it incurs exponential complexity.
// If the current method call is itself an argument to another method that is scoring,
// this parse tree is going to get thrown away; the arguments in this method call
// are of no consequence and only the method call's return type is meaningful, so there is no
// need to score this call -- just grab the first function in the list, parse the args, and bail.
listFunctionTypes = maybeAvoidNestedMethodScoring( listFunctionTypes );
boolean bShouldScoreMethods = listFunctionTypes.size() > 1;
List<MethodScore> scoredMethods = new ArrayList<>();
int iOffset = _tokenizer.getTokenStart();
int iLineNum = _tokenizer.getLineNumber();
int iColumn = _tokenizer.getTokenColumn();
listFunctionTypes = maybeRemoveNonGenericMethods( listFunctionTypes, typeParams );
List<Integer> namedArgOrder = null;
int mark = getTokenizer().mark();
int iLocationsCount = _locations.size();
MethodScore score = null;
List<Expression> argExpressions = null;
for( int i = 0; i < listFunctionTypes.size() || (i == 0 && listFunctionTypes.isEmpty()); i++ )
{
int iArgs = 0;
argExpressions = new ArrayList<>( 4 );
Set<String> namedArgs = new HashSet<>();
List<LightweightParserState> parserStates = new ArrayList<>( 4 );
IInvocableType funcType = listFunctionTypes.isEmpty() ? null : listFunctionTypes.get( i );
TypeVarToTypeMap inferenceMap = getInferenceMap();
if( inferenceMap == null )
{
inferenceMap = new TypeVarToTypeMap();
}
pushInferenceMap( inferenceMap );
pushTypeVariableTypesToInfer( funcType );
TypeVarToTypeMap masked;
if( inferenceMap.isReparsing() )
{
masked = TypeVarToTypeMap.EMPTY_MAP;
inferenceMap.setReparsing( false );
}
else
{
masked = maskCurrentFunctionTypeVarsFromPriorInference();
}
maybeInferFunctionTypeVarsFromReturnType( funcType, inferenceMap );
List<ITypeVariableType> funcLocalTypeVars = null;
try
{
if( !bNoArgsProvided )
{
if( score != null )
{
backtrackArgParsing( mark, iLocationsCount, (List)score.getArguments() );
}
pushTypeVariableTypesToInfer( funcType );
try
{
do
{
parserStates.add( makeLightweightParserState() );
int iArgPos = parseArgExpression( funcType, iArgs, argExpressions, inferenceMap, parserStates, namedArgs, bShouldScoreMethods || getContextType().isMethodScoring()/* avoid nested scoring */ );
namedArgOrder = assignArgExprPosition( listFunctionTypes, iArgs, namedArgOrder, iArgPos );
iArgs++;
}
while( match( null, ',' ) );
}
finally
{
if( funcType != null )
{
//noinspection unchecked
funcLocalTypeVars = (List)popInferringFunctionTypeVariableTypes();
}
}
}
// Extend the args list with default (or empty) values
addMisingArgsWithDefaultValues( element, funcType, argExpressions, parserStates, bShouldScoreMethods );
if( !bVerifyArgs )
{
score = new MethodScore( rootType, getGosuClass() );
score.setValid( false );
//noinspection unchecked
score.setArguments( (List)argExpressions );
score.setNamedArgOrder( namedArgOrder );
if( i == listFunctionTypes.size() - 1 )
{
maybeReassignOffsetForArgumentListClause( iArgs, argExpressions, iOffset, iLineNum, iColumn );
}
return score;
}
else if( isDynamicMethod( listFunctionTypes ) )
{
if( i == listFunctionTypes.size() - 1 )
{
maybeReassignOffsetForArgumentListClause( iArgs, argExpressions, iOffset, iLineNum, iColumn );
}
return makeDynamicMethodScore( listFunctionTypes, argExpressions );
}
score = scoreMethod( getGosuClass(), rootType, funcType, listFunctionTypes, argExpressions, !bShouldScoreMethods, !hasContextSensitiveExpression( argExpressions ) );
score.setInferenceMap( inferenceMap.isEmpty() ? TypeVarToTypeMap.EMPTY_MAP : new TypeVarToTypeMap( inferenceMap ) );
}
finally
{
if( funcType != null )
{
if( funcLocalTypeVars != null )
{
for( ITypeVariableType tv: funcLocalTypeVars )
{
inferenceMap.remove( tv );
}
}
popInferringFunctionTypeVariableTypes();
}
popInferenceMap( inferenceMap );
inferenceMap = getInferenceMap();
if( inferenceMap != null )
{
// put back masked inferences
inferenceMap.putAll( masked );
}
}
//noinspection unchecked
score.setArguments( (List)argExpressions );
score.setParserStates( parserStates );
scoredMethods.add( score );
if( score.getScore() == 0 && !hasContextSensitiveExpression( argExpressions ) )
{
// perfect score, no need to continue
//## todo: this should not happen, we need to change the key for the scored method cache to be type *hierarchy* sensitive
// e.g., overloaded methods can have different enclosing types, but basically in the same hierarchy; for enhancements we should use the enhanced type, for example.
if( scoredMethods.size() > 1 )
{
scoredMethods.clear();
scoredMethods.add( score );
}
break;
}
}
if( scoredMethods.size() > 1 )
{
Collections.sort( scoredMethods );
}
if( scoredMethods.size() > 1 )
{
scoredMethods = factorInParseErrors( scoredMethods );
if( scoredMethods.size() > 1 )
{
// Check for ambiguity, quit with an error if so
MethodScore score0 = scoredMethods.get( 0 );
MethodScore score1 = scoredMethods.get( 1 );
if( score0.getScore() == score1.getScore() && score0.matchesArgSize() )
{
addError( element, Res.MSG_AMBIGUOUS_METHOD_INVOCATION );
score0.setValid( false );
return score0;
}
}
}
if( scoredMethods.size() > 0 )
{
// Scores are sorted, first one has best score
MethodScore bestScore = scoredMethods.get( 0 );
if( listFunctionTypes.isEmpty() )
{
bestScore.setValid( false );
return bestScore;
}
IInvocableType rawFunctionType = bestScore.getRawFunctionType();
pushTypeVariableTypesToInfer( rawFunctionType );
try
{
MethodScore theScore = reparseWithCorrectFunctionAndGtfo( bestScore, bShouldScoreMethods, argExpressions, element,
mark, iLocationsCount, rootType, typeParams, bVerifyArgs, bNoArgsProvided );
if( theScore != null )
{
return theScore;
}
maybeReassignOffsetForArgumentListClause( argExpressions.size(), argExpressions, iOffset, iLineNum, iColumn );
// Infer the function type
IInvocableType inferredFunctionType = inferFunctionType( rawFunctionType, bestScore.getArguments(), isEndOfArgExpression(), bestScore.getInferenceMap() );
if( !getContextType().isMethodScoring() )
{
// Only do the following if this is *not* a *nested* method scoring call i.e., if the current method call is an argument to another method that is scoring,
// this parse tree is going to get thrown away and the arguments in this method call are of no consequence; only the method call's return type is meaningful,
// so there is no need to apply implicit coercions etc.
// reverify args
verifyArgTypes( inferredFunctionType.getParameterTypes(), (List)bestScore.getArguments(), bestScore.getParserStates() );
if( bestScore.isValid() )
{
// if the bestScore is valid, bind inferred variables to avoid them escaping as raw type variables
inferredFunctionType = maybeBoundFunctionTypeVars( inferredFunctionType, bestScore.getInferenceMap() );
// Some args may need implicit coercions applied
handleImplicitCoercionsInArgs( element, inferredFunctionType.getParameterTypes(),
rawFunctionType.getParameterTypes(),
(List)bestScore.getArguments() );
}
}
//noinspection unchecked
bestScore.setArguments( (List)bestScore.getArguments() );
bestScore.setInferredFunctionType( inferredFunctionType );
bestScore.setNamedArgOrder( namedArgOrder );
return bestScore;
}
finally
{
popInferringFunctionTypeVariableTypes();
}
}
else
{
MethodScore errScore = new MethodScore( IRelativeTypeInfo.Accessibility.NONE, null );
errScore.setValid( false );
errScore.setArguments( Collections.<IExpression>emptyList() );
return errScore;
}
}
private MethodScore reparseWithCorrectFunctionAndGtfo( MethodScore bestScore, boolean bShouldScoreMethods, List<Expression> argExpressions,
ParsedElement element, int mark, int iLocationsCount, IType rootType, IType[] typeParams, boolean bVerifyArgs, boolean bNoArgsProvided )
{
if( bShouldScoreMethods )
{
// Reparse with correct funcType overload in context
return reparseArguments( bestScore, argExpressions, element, mark, iLocationsCount, rootType, typeParams, bVerifyArgs, bNoArgsProvided );
}
else
{
// Reparse if there is an error in a parameter expression and we can possibly
// infer a type in the expression from a subsequent expression.
// For example, given:
// function make<B>( consume(b:B), ref: B ) : B
// make( \ b -> b.Code, MyEnum.Hi )
// the type of 'b' can be inferred from the second argument, thus we must reparse
// with the inference map having B inferred.
TypeVarToTypeMap inferenceMap = bestScore.getInferenceMap();
if( !inferenceMap.isEmpty() &&
argExpressions.stream().anyMatch( ParsedElement::hasParseExceptions ) &&
(getInferenceMap() == null || getInferenceMap().getReparseElement() != element || reparseErrorsAreDifferent( getInferenceMap(), argExpressions )) )
{
inferenceMap = new TypeVarToTypeMap( inferenceMap );
inferenceMap.setReparsing( true );
inferenceMap.setReparseElement( element );
inferenceMap.pushReparseErrors( argExpressions.stream().flatMap( e -> e.getParseExceptions().stream() ).collect( Collectors.toList() ) );
pushInferenceMap( inferenceMap );
try
{
// Reparse with a potentially better inference map
return reparseArguments( bestScore, argExpressions, element, mark, iLocationsCount, rootType, typeParams, bVerifyArgs, bNoArgsProvided );
}
finally
{
inferenceMap.setReparsing( false );
inferenceMap.setReparseElement( null );
inferenceMap.popReparseErrors();
popInferenceMap( inferenceMap );
}
}
}
return null;
}
private boolean reparseErrorsAreDifferent( TypeVarToTypeMap inferenceMap, List<Expression> argExpressions )
{
Stack<List<IParseIssue>> reparseErrors = inferenceMap.getReparseErrorStack();
if( reparseErrors == null || reparseErrors.isEmpty() )
{
return true;
}
List<IParseIssue> newErrors = argExpressions.stream().flatMap( e -> e.getParseExceptions().stream() ).collect( Collectors.toList() );
for( List<IParseIssue> errors: reparseErrors )
{
if( errorsSame( newErrors, errors ) )
{
return false;
}
}
return true;
}
private boolean errorsSame( List<IParseIssue> newErrors, List<IParseIssue> errors )
{
int size = newErrors.size();
if( size != errors.size() )
{
return false;
}
for( int i = 0; i < size; i++ )
{
IParseIssue newErr = newErrors.get( i );
IParseIssue oldErr = errors.get( i );
if( newErr.getLine() != oldErr.getLine() ||
newErr.getColumn() != oldErr.getColumn() ||
newErr.getMessageKey() != oldErr.getMessageKey() )
{
return false;
}
}
return true;
}
private MethodScore reparseArguments( MethodScore bestScore, List<Expression> argExpressions, ParsedElement element, int mark, int iLocationsCount, IType rootType, IType[] typeParams, boolean bVerifyArgs, boolean bNoArgsProvided )
{
backtrackArgParsing( mark, iLocationsCount, argExpressions );
MethodScorer.MethodScoreKey key = null;
long score = bestScore.getScore();
if( !getContextType().isMethodScoring() )
{
// cache the score so other call sites can avoid scoring
key = MethodScorer.instance().putCachedMethodScore( bestScore );
}
MethodScore methodScore = parseArgumentList( rootType, element, Arrays.asList( bestScore.getRawFunctionType() ), typeParams, bVerifyArgs, bNoArgsProvided );
if( key != null )
{
// Uncache if arguments are errant; this avoids caching too early e.g., when backtracking and reparsing differently
for( IExpression arg: methodScore.getArguments() )
{
if( hasParseExceptions( (Expression)arg ) )
{
methodScore.setScore( score );
MethodScorer.instance().removeCachedMethodScore( key );
break;
}
}
}
return methodScore;
}
private TypeVarToTypeMap maskCurrentFunctionTypeVarsFromPriorInference()
{
// this is for this case:
// var listOfLists: List<List<String>>
// var mappedListofLists: List<List<String>> = listOfLists.map( \ list -> list.map( \ e -> e ) )
// where basically we have nested calls involving the same function, simplified:
// listList.map( \ list -> list.map( \ e -> e ) ) // map() nests another call to map()
// where map()'s type var needs to be distinguished in each call, but type vars
// are only distinguished in terms of their declaring type. Because we don't want
// to complicate type vars any further, we instead create a separate "scope" for
// inferring them via masking type vars from nested calls. This technique eliminates
// interference from outer nestings while preserving all other mappings.
TypeVarToTypeMap inferenceMap = getInferenceMap();
List<IType> inferringFunctionTypeVars = peekInferringFunctionTypeVariableTypes();
TypeVarToTypeMap masked = new TypeVarToTypeMap();
for( IType tv: inferringFunctionTypeVars )
{
IType type = inferenceMap.remove( (ITypeVariableType)tv );
if( type != null )
{
masked.put( (ITypeVariableType)tv, type );
}
}
return masked;
}
private void maybeInferFunctionTypeVarsFromReturnType( IInvocableType invType, TypeVarToTypeMap inferenceMap )
{
if( !(invType instanceof IFunctionType) )
{
return;
}
IFunctionType funcType = (IFunctionType)invType;
if( funcType.isGenericType() &&
TypeLord.hasTypeVariable( funcType.getReturnType() ) &&
!getContextType().isMethodScoring() &&
getContextType().getType() != null &&
getContextType() != ContextType.EMPTY &&
(getContextType().getUnboundType() == null || !boundCtxType( getContextType().getUnboundType() ).equals( getContextType().getType() )) ) // no sense in inferring type OUT from default type
{
if( isParenthesisTerminalExpression() )
{
// Note we must infer in "reverse" because the context type flows INTO the return type
// For example,
// var list: List<String> = Lists.newArrayList( FooJava.filter( {""}, FooJava.not( \ r -> r.Alpha ) ) )
// The context type, List<String>, can infer type var of Lists.newArrayList() by way of its return type, ArrayList<E>.
// But the inference relationship is reversed: instead of inferring from right-to-left, we infer left-to-right, hence the "Reverse" call here:
TypeLord.inferTypeVariableTypesFromGenParamTypeAndConcreteType_Reverse( funcType.getReturnType(), getContextType().getType(), inferenceMap );
}
}
}
private List<MethodScore> factorInParseErrors( List<MethodScore> scoredMethods )
{
List<MethodScore> factored = new ArrayList<>( scoredMethods.size() );
List<MethodScore> noErrors = new ArrayList<>( scoredMethods.size() );
long bestScore = -1;
for( MethodScore score : scoredMethods )
{
boolean bErrors = false;
for( IExpression arg: score.getArguments() )
{
if( arg.hasParseExceptions() )
{
// change the score to reflect errors
score.incScore( 1 );
bErrors = true;
}
else if( arg.getType() instanceof IErrorType )
{
bErrors = true;
}
}
if( !bErrors )
{
noErrors.add( score );
}
// determine best score among the ambiguous calls
bestScore = bestScore >= 0 ? Math.min( bestScore, score.getScore() ) : score.getScore();
}
if( noErrors.size() > 0 )
{
// favor non-errant calls
return noErrors;
}
for( MethodScore score : scoredMethods )
{
if( score.getScore() == bestScore )
{
factored.add( score );
}
}
return factored;
}
private boolean hasContextSensitiveExpression( List<Expression> argExpressions )
{
for( Expression e : argExpressions )
{
if( e instanceof IInferredNewExpression ||
e instanceof UnqualifiedEnumMemberAccess ||
isGenericMethodCall( e ) ||
e instanceof Identifier && e.hasParseExceptions() )
{
return true;
}
}
return false;
}
private boolean isGenericMethodCall( Expression e )
{
if( e instanceof MethodCallExpression )
{
IFunctionType functionType = ((MethodCallExpression) e).getFunctionType();
return functionType != null && (functionType.isGenericType() || functionType.isParameterizedType());
}
return false;
}
private List<? extends IInvocableType> maybeAvoidNestedMethodScoring( List<? extends IInvocableType> listFunctionTypes )
{
if( listFunctionTypes.size() < 2 )
{
return listFunctionTypes;
}
if( !getContextType().isMethodScoring() )
{
// Not nested in a method score
return listFunctionTypes;
}
IType retType = null;
for( IInvocableType funcType: listFunctionTypes )
{
if( !(funcType instanceof IFunctionType) )
{
return listFunctionTypes;
}
IType csr = ((IFunctionType)funcType).getReturnType();
if( retType != null && csr != retType )
{
// functions have different return types, must score methods here to find correct type for enclosing method score
return listFunctionTypes;
}
retType = csr;
}
// Return types are all the same, no need to perform nested method scoring, so only parse against one of them
return Collections.singletonList( listFunctionTypes.get( 0 ) );
}
private void backtrackArgParsing( int mark, int iLocationsCount, List<Expression> argExpressions )
{
for( int i = argExpressions.size()-1; i >= 0; i-- )
{
backtrack( mark, iLocationsCount, argExpressions.get( i ) );
}
}
private List<? extends IInvocableType> maybeRemoveNonGenericMethods( List<? extends IInvocableType> listFunctionTypes, IType[] typeParams )
{
// if there were type parameters, remove any non-generic functions
if( typeParams != null && typeParams.length > 0 )
{
ArrayList<IInvocableType> genericFunctions = new ArrayList<>();
for( IInvocableType type : listFunctionTypes )
{
if( type.isGenericType() )
{
genericFunctions.add( type );
}
}
if( !genericFunctions.isEmpty() )
{
listFunctionTypes = genericFunctions;
}
}
return listFunctionTypes;
}
private void maybeReassignOffsetForArgumentListClause( int iArgs, List<Expression> argExpressions, int iOffset, int iLineNum, int iColumn )
{
boolean noLocations = true;
if( iArgs > 0 )
{
// Maybe reassign offset for ArgumentListClause...
for( Expression argExpr: argExpressions )
{
if( argExpr.getLocation() == null )
{
continue;
}
noLocations = false;
int iExpressionOffset = argExpr.getLocation().getOffset();
if( iExpressionOffset < iOffset )
{
// Can happen if first arg is a NotAWordExpression in which case the expr's length is 0 and its offset is the
// previous token's ending offset, which will likely be less than the first token in the arg position's offset.
iOffset = iExpressionOffset;
iLineNum = argExpr.getLocation().getLineNum();
iColumn = argExpr.getLocation().getColumn();
}
}
if(!noLocations)
{
ArgumentListClause e = new ArgumentListClause();
pushExpression( e );
setLocation( iOffset, iLineNum, iColumn, true );
popExpression();
}
}
}
private IInvocableType maybeBoundFunctionTypeVars( IInvocableType inferredFunctionType, TypeVarToTypeMap inferenceMap )
{
List<IType> types = new ArrayList<>();
for( IType typeVarType : getCurrentlyInferringFunctionTypeVars() )
{
if( inferenceMap.get( (ITypeVariableType)typeVarType ) == null )
{
IType encType = TypeLord.getPureGenericType( typeVarType.getEnclosingType() );
if( encType != null && TypeLord.getPureGenericType( inferredFunctionType ).isAssignableFrom( typeVarType.getEnclosingType() ) )
{
types.add( typeVarType );
}
}
}
return (IInvocableType) TypeLord.boundTypes( inferredFunctionType, types );
}
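// Maintains the mapping from parsed argument position to declared parameter position when named
// arguments appear out of order, so the call can later be evaluated in declaration order.
// Illustrative Gosu call (assumed signature function foo( a: int, b: String )):
//   foo( :b = "x", :a = 1 )   // parsed positions map to declared parameters roughly as [1, 0]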
private List<Integer> assignArgExprPosition( List<? extends IInvocableType> listFunctionTypes, int iArgs, List<Integer> namedArgOrder, int iArgPos )
{
if( (namedArgOrder != null || iArgPos != iArgs) && listFunctionTypes.size() > 0 )
{
if( namedArgOrder == null )
{
int iSize = listFunctionTypes.get( 0 ).getParameterTypes().length;
namedArgOrder = new ArrayList<>( iSize );
for( int i = 0; i < iSize; i++ )
{
namedArgOrder.add( i );
}
}
namedArgOrder.remove( (Integer)iArgPos );
if( namedArgOrder.size() >= iArgs ) {
namedArgOrder.add( iArgs, iArgPos );
}
else {
namedArgOrder.add( iArgPos );
}
}
return namedArgOrder;
}
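// Pads the parsed argument list out to the full parameter count using each parameter's default
// value (or a placeholder) when the function declares optional parameters. Illustrative Gosu
// signature (assumed): function log( msg: String, level: int = 0 ) -- log("hi") gets level filled in.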
void addMisingArgsWithDefaultValues( ParsedElement element, IInvocableType funcType, List<Expression> argExpressions, List<LightweightParserState> parserStates, boolean bShouldScoreMethods )
{
if( funcType != null && funcType.hasOptionalParams() )
{
for( int i = argExpressions.size(); i < funcType.getParameterTypes().length; i++ )
{
if( parserStates != null )
{
parserStates.add( makeLightweightParserState() );
}
argExpressions.add( i, getDefaultValueOrPlaceHolderForParam( i, funcType ) );
}
if( !bShouldScoreMethods )
{
for( Expression argExpr : argExpressions )
{
if( !verify( element, argExpr != DefaultParamValueLiteral.instance(), Res.MSG_MISSING_REQUIRED_ARGUMENTS ) )
{
break;
}
}
}
}
}
private int parseArgExpression( IInvocableType funcType,
int iArgs,
List<Expression> argExpressions,
TypeVarToTypeMap inferenceMap,
List<LightweightParserState> parserStates,
Set<String> namedArgs,
boolean bMethodScoring )
{
IType rawCtxType;
IType boundCtxType;
boolean bError_AnonymousArgFollowsNamedArg = false;
int iArgPos = iArgs;
boolean bAlreadyDef = false;
IType[] paramTypes = funcType == null ? IType.EMPTY_ARRAY : funcType.getParameterTypes();
IType retainTypeVarsCtxType = null;
if( match( null, ":", ISourceCodeTokenizer.TT_OPERATOR, true ) )
{
iArgPos = parseNamedParamExpression( funcType, bMethodScoring );
if( iArgPos == -1 )
{
namedArgs.add( "err" );
}
else if( funcType != null )
{
String[] parameterNames = funcType.getParameterNames();
bAlreadyDef = namedArgs.add( parameterNames[iArgPos] );
}
if( argExpressions.size() < iArgPos+1 )
{
// Extend the args list with default (or empty) values up to, but not including, the newly parsed arg
for( int i = argExpressions.size(); i < iArgPos; i++ )
{
argExpressions.add( i, getDefaultValueOrPlaceHolderForParam( i, funcType ) );
parserStates.add( i, makeLightweightParserState() );
}
assert argExpressions.size() == iArgPos;
}
}
else if( !namedArgs.isEmpty() )
{
bError_AnonymousArgFollowsNamedArg = true;
}
IType ctxType = iArgPos < 0 ? ErrorType.getInstance() : iArgPos < paramTypes.length ? paramTypes[iArgPos] : null;
ctxType = ctxType == null ? useDynamicTypeIfDynamicRoot( funcType, ctxType ) : ctxType;
rawCtxType = ctxType == null ? null : inferArgType( ctxType, inferenceMap );
if( ctxType == null )
{
boundCtxType = null;
}
else if( rawCtxType.isGenericType() && !rawCtxType.isParameterizedType() )
{
boundCtxType = TypeLord.getDefaultParameterizedType( rawCtxType );
}
else
{
boundCtxType = boundCtxType( rawCtxType );
if( rawCtxType instanceof IBlockType )
{
retainTypeVarsCtxType = boundCtxType( rawCtxType, true );
}
else if( rawCtxType != null )
{
// handle functional interface types
IFunctionType ftype = rawCtxType.getFunctionalInterface();
if( ftype != null )
{
retainTypeVarsCtxType = boundCtxType( rawCtxType, true );
}
}
}
ContextType ctx = retainTypeVarsCtxType != null
? ContextType.makeBlockContexType( ctxType, retainTypeVarsCtxType, bMethodScoring )
: new ContextType( boundCtxType, ctxType, bMethodScoring );
parseExpressionNoVerify( ctx );
Expression expression = popExpression();
verify( expression, !bError_AnonymousArgFollowsNamedArg, Res.MSG_EXPECTING_NAMED_ARG );
inferFunctionTypeVariables( ctxType, boundCtxType, expression, inferenceMap );
if( retainTypeVarsCtxType != null )
{
IType actualType = TypeLord.getActualType( expression.getType(), inferenceMap, true );
actualType = boundCtxType( actualType, false );
expression.setType( actualType );
}
iArgPos = iArgPos < 0 ? iArgs : iArgPos;
if( iArgPos >= 0 )
{
if( iArgPos == argExpressions.size() )
{
argExpressions.add( iArgPos, expression );
}
else if( iArgPos >= 0 && iArgPos < argExpressions.size() && bAlreadyDef )
{
Expression existingExpr = argExpressions.set( iArgPos, expression );
verify( expression,
existingExpr == DefaultParamValueLiteral.instance() ||
existingExpr instanceof DefaultArgLiteral ||
existingExpr instanceof NullExpression,
Res.MSG_ARGUMENT_ALREADY_DEFINED );
}
}
return iArgPos;
}
private IType useDynamicTypeIfDynamicRoot( IInvocableType funcType, IType ctxType )
{
if( funcType instanceof FunctionType && ((FunctionType)funcType).getMethodInfo() != null )
{
IMethodInfo mi = ((FunctionType)funcType).getMethodInfo();
if( mi.getOwnersType().isDynamic() )
{
ctxType = ((FunctionType)funcType).getReturnType();
}
}
return ctxType;
}
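// Returns the parameter's declared default value wrapped in a DefaultArgLiteral, or the shared
// DefaultParamValueLiteral placeholder when the invocable type declares no default for it.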
private Expression getDefaultValueOrPlaceHolderForParam( int iParam, IInvocableType invType )
{
if( invType == null )
{
return DefaultParamValueLiteral.instance();
}
IExpression[] defValues = invType.getDefaultValueExpressions();
if( defValues == null || defValues.length == 0 )
{
return DefaultParamValueLiteral.instance();
}
IExpression defValue = defValues[iParam];
if( defValue != null )
{
return new DefaultArgLiteral( invType.getParameterTypes()[iParam], defValue );
}
return DefaultParamValueLiteral.instance();
}
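// Named argument: ':' <param-name> '=' <expression>, e.g. (hypothetical names)
// foo( :count = 3, :label = "x" ). Returns the declared parameter position, or -1 when the
// name does not match a parameter of the invocable type.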
private int parseNamedParamExpression( IInvocableType invType, boolean bMethodScoring )
{
match( null, ":", SourceCodeTokenizer.TT_OPERATOR );
parseNamedParamIdentifier();
Identifier identifier = (Identifier)popExpression();
int[] iPos = {-1};
IType type = getParamTypeFromParamName( invType, identifier.getSymbol().getName(), iPos );
identifier.setType( type );
verify( identifier, !(type instanceof ErrorType), Res.MSG_PARAM_NOT_FOUND );
verify( identifier, match( null, "=", SourceCodeTokenizer.TT_OPERATOR ), Res.MSG_EXPECTING_EQUALS_ASSIGN );
return iPos[0];
}
private IType getParamTypeFromParamName( IInvocableType invType, String strParam, int[] iPos )
{
if( invType == null )
{
return ErrorType.getInstance();
}
String[] names = invType.getParameterNames();
for( int i = 0; i < names.length; i++ )
{
if( names[i].equals( strParam ) )
{
iPos[0] = i;
return invType.getParameterTypes()[i];
}
}
return ErrorType.getInstance();
}
private void parseNamedParamIdentifier()
{
int iOffset = _tokenizer.getTokenStart();
int iLineNum = _tokenizer.getLineNumber();
int iColumn = _tokenizer.getTokenColumn();
Token t = new Token();
Identifier expr = new Identifier();
verify( expr, match( t, SourceCodeTokenizer.TT_WORD ) ||
match( t, SourceCodeTokenizer.TT_KEYWORD ), Res.MSG_EXPECTING_NAME_PARAM );
expr.setSymbol( new TypedSymbol( t._strValue, null, null, null, SymbolType.NAMED_PARAMETER ), null );
pushExpression( expr );
setLocation( iOffset, iLineNum, iColumn );
}
private boolean isDynamicMethod( List<? extends IInvocableType> listFunctionTypes )
{
if( listFunctionTypes == null || listFunctionTypes.isEmpty() )
{
return false;
}
IInvocableType invoType = listFunctionTypes.get( 0 );
if( invoType instanceof FunctionType )
{
IMethodInfo mi = ((FunctionType)invoType).getMethodInfo();
if( mi != null && (mi.getOwnersType() instanceof IPlaceholder || mi instanceof DynamicMethodInfo) )
{
return true;
}
}
return false;
}
private MethodScore makeDynamicMethodScore( List<? extends IInvocableType> listFunctionTypes, List<Expression> argExpressions )
{
MethodScore score = new MethodScore( IRelativeTypeInfo.Accessibility.NONE, getGosuClass() );
score.setValid( true );
//noinspection unchecked
score.setArguments( (List)argExpressions );
IMethodInfo mi = ((FunctionType)listFunctionTypes.get( 0 )).getMethodInfo();
mi = ((ITypeInfo)mi.getContainer()).getMethod( mi.getName(), getTypes( argExpressions ).toArray( new IType[argExpressions.size()] ) );
score.setInferredFunctionType( new FunctionType( mi ) );
score.setRawFunctionType( score.getInferredFunctionType() );
score.setScore( 1 );
return score;
}
private MethodScore scoreMethod( IType callsiteEnclosingType, IType rootType, IInvocableType funcType, List<? extends IInvocableType> listFunctionTypes, List<Expression> argExpressions, boolean bSimple, boolean bLookInCache) {
List<IType> argTypes = new ArrayList<>( argExpressions.size() );
for( Expression argExpression : argExpressions ) {
argTypes.add( argExpression.getType() );
}
return MethodScorer.instance().scoreMethod( callsiteEnclosingType, rootType, funcType, listFunctionTypes, argTypes, getCurrentlyInferringFunctionTypeVars(), bSimple, bLookInCache );
}
private IType boundCtxType( IType ctxType )
{
return boundCtxType( ctxType, false );
}
private IType boundCtxType( IType ctxType, boolean bKeepTypeVars )
{
List<IType> inferringTypes = getCurrentlyInferringFunctionTypeVars();
return TypeLord.boundTypes( ctxType, inferringTypes, bKeepTypeVars );
}
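// Feeds the argument expression's type (and, when a resolving coercer applies, its resolved
// type) into the type-variable inference map; NullExpressions infer in the reverse direction.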
private void inferFunctionTypeVariables( IType rawContextType, IType boundContextType, Expression expression, TypeVarToTypeMap inferenceMap )
{
if( rawContextType != null && boundContextType != null )
{
IType expressionType = expression.getType();
ICoercer iCoercer = CommonServices.getCoercionManager().resolveCoercerStatically( boundContextType, expressionType );
if( iCoercer instanceof IResolvingCoercer )
{
IType resolvedType = ((IResolvingCoercer)iCoercer).resolveType( rawContextType, expressionType );
if( expression instanceof NullExpression )
{
TypeLord.inferTypeVariableTypesFromGenParamTypeAndConcreteType_Reverse( rawContextType, resolvedType, inferenceMap );
TypeLord.inferTypeVariableTypesFromGenParamTypeAndConcreteType_Reverse( rawContextType, expressionType, inferenceMap );
}
else
{
TypeLord.inferTypeVariableTypesFromGenParamTypeAndConcreteType( rawContextType, resolvedType, inferenceMap );
TypeLord.inferTypeVariableTypesFromGenParamTypeAndConcreteType( rawContextType, expressionType, inferenceMap );
}
}
else
{
if( expression instanceof NullExpression )
{
TypeLord.inferTypeVariableTypesFromGenParamTypeAndConcreteType_Reverse( rawContextType, expressionType, inferenceMap );
}
else
{
TypeLord.inferTypeVariableTypesFromGenParamTypeAndConcreteType( rawContextType, expressionType, inferenceMap );
}
}
}
}
private IType inferArgType( IType contextType, TypeVarToTypeMap inferenceMap )
{
TypeLord.addReferencedTypeVarsThatAreNotInMap( contextType, inferenceMap );
return TypeLord.getActualType( contextType, inferenceMap, true );
}
private void handleImplicitCoercionsInArgs( ParsedElement element, IType[] argTypes, IType[] rawArgTypes, List<Expression> args )
{
for( int i = 0; i < argTypes.length && i < args.size(); i++ )
{
IType argType = argTypes[i];
Expression expr = args.get( i );
if( argType != rawArgTypes[i] &&
(argType instanceof IGosuArrayClass || argType instanceof TypeVariableArrayType) &&
!(rawArgTypes[i] instanceof IGosuArrayClass) )
{
// Special case for GosuArray -> JavaArray since a generic function can be inferred via structural equivalence
// e.g., component types are assignable, but different array types on a component type may not be.
argType = rawArgTypes[i];
}
if( expr instanceof DefaultParamValueLiteral )
{
if( !element.hasParseException( Res.MSG_MISSING_REQUIRED_ARGUMENTS ) )
{
addError( element, Res.MSG_MISSING_REQUIRED_ARGUMENTS );
}
}
else
{
args.set( i, possiblyWrapWithImplicitCoercion( expr, argType ) );
}
}
}
// Returns a list of lists of the unique types at each argument position.
private List<List<IType>> extractContextTypes( List<? extends IInvocableType> funcTypes )
{
if( funcTypes != null )
{
ArrayList<List<IType>> returnList = new ArrayList<>();
for( IInvocableType funcType : funcTypes )
{
for( int i = 0; i < funcType.getParameterTypes().length; i++ )
{
IType paramType = funcType.getParameterTypes()[i];
if( i >= returnList.size() )
{
returnList.add( new ArrayList<>() );
}
List<IType> paramTypeList = returnList.get( i );
if( !paramTypeList.contains( paramType ) )
{
paramTypeList.add( paramType );
}
}
}
return returnList;
}
else
{
return Collections.emptyList();
}
}
private void verifyArgTypes( IType[] argTypes, List<Expression> argExpressions, List<LightweightParserState> parserStates )
{
if( argTypes == null || argTypes.length == 0 || argExpressions == null || argExpressions.size() == 0 )
{
return;
}
for( int i = 0; i < argTypes.length && i < argExpressions.size(); i++ )
{
Expression e = argExpressions.get( i );
LightweightParserState state = parserStates.get( i );
// Adds any parse exceptions that may have been cleared during method scoring
if( e instanceof NullExpression )
{
e.setType( argTypes[i] );
}
else
{
verifyComparable( argTypes[i], e, false, true, state );
}
//Add a warning if a closure with a void return type is passed to a method expecting
//a non-void return value
if( argTypes[i] instanceof FunctionType && e.getType() instanceof FunctionType )
{
FunctionType expectedFunType = (FunctionType)argTypes[i];
FunctionType foundFunType = (FunctionType)e.getType();
if( expectedFunType.getReturnType() != GosuParserTypes.NULL_TYPE() && foundFunType.getReturnType() == GosuParserTypes.NULL_TYPE() )
{
warn( e, false, Res.MSG_VOID_RETURN_IN_CTX_EXPECTING_VALUE );
}
}
if( e.hasParseExceptions() )
{
IParseIssue pe = e.getParseExceptions().get( 0 );
if( pe.getExpectedType() == null )
{
pe.setExpectedType( argTypes[i] );
}
}
}
}
// literal
// number
// string
// <type-literal>
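//   e.g. (illustrative): 42, 3.14bd, "hello", 'c', true, null, String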
void parseLiteral( Token token )
{
int iOffset = token.getTokenStart();
int iLineNum = token.getLine();
int iColumn = token.getTokenColumn();
_parseLiteral( token );
setLocation( iOffset, iLineNum, iColumn );
}
void _parseLiteral( Token token )
{
if( !parseNumberLiteral( token ) &&
!parseRelativeFeatureLiteral( token ) &&
!parseStringLiteral( token ) &&
!parseCharLiteral( token ) &&
!parseBooleanLiteral( token ) &&
!parseNullLiteral( token ) &&
!parseTypeLiteral( token ) )
{
Expression expr = popExpression();
getLocationsList().remove( expr.getLocation() );
NotAWordExpression notAWord = new NotAWordExpression();
pushExpression( notAWord );
verify( notAWord, false, Res.MSG_SYNTAX_ERROR );
Token T = getTokenizer().getPriorToken();
setLocation( T.getTokenEnd(), T.getLine(), T.getTokenColumn(), true );
}
}
private boolean parseRelativeFeatureLiteral( Token token )
{
if( getGosuClass() != null &&
SourceCodeTokenizer.TT_OPERATOR == token.getType() && "#".equals( token.getStringValue() ) )
{
Expression root = new TypeLiteral( getGosuClass() );
pushExpression( root );
if( parseFeatureLiteral( token, root ) )
{
return true;
}
popExpression();
}
return false;
}
private boolean parseNumberLiteral( Token token )
{
return parseNumberLiteral( token, false );
}
private boolean atNumberLiteralStart()
{
return match( null, null, SourceCodeTokenizer.TT_INTEGER, true ) || match( null, null, '.', true );
}
private boolean parseNumberLiteral( Token token, boolean negated )
{
if( Keyword.KW_NaN == token.getKeyword() )
{
getTokenizer().nextToken();
pushExpression( NumericLiteral.NaN.get().copy() );
return true;
}
else if( Keyword.KW_Infinity == token.getKeyword() )
{
getTokenizer().nextToken();
pushExpression( NumericLiteral.INFINITY.get().copy() );
return true;
}
int mark = getTokenizer().mark();
Token T = new Token();
if( match( T, SourceCodeTokenizer.TT_INTEGER ) )
{
String strValue = (negated ? "-" : "") + T._strValue;
if( getNumericTypeFrom( strValue ) == null && match( null, '.' ) )
{
strValue = maybeStripTypeModifier( strValue, null );
Token tmp = new Token();
if( match( tmp, null, SourceCodeTokenizer.TT_INTEGER, true ) )
{
if( !isPrefixNumericLiteral(tmp._strValue) )
{
match( T, SourceCodeTokenizer.TT_INTEGER );
strValue += '.';
strValue += T._strValue;
}
else
{
strValue += ".0";
}
}
else
{
match( T, SourceCodeTokenizer.TT_INTEGER );
strValue += ".0";
}
}
int lastPos = T.getTokenEnd();
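// Scientific notation: a word starting with 'e'/'E' adjacent to the digits is folded into the
// literal, e.g. (illustrative) 1e10, 2.5e-3, 6.02E23.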
if( match( T, null, SourceCodeTokenizer.TT_WORD, true ) )
{
if( (lastPos >= T.getTokenStart()) && (T._strValue.charAt(0) == 'e' ||
T._strValue.charAt(0) == 'E' ) )
{
match( T, SourceCodeTokenizer.TT_WORD );
if( T._strValue.length() == 1 )
{
strValue += "e";
if( match( T, "+", SourceCodeTokenizer.TT_OPERATOR ) ||
match( T, "-", SourceCodeTokenizer.TT_OPERATOR ) )
{
strValue += T._strValue;
}
if( match( T, SourceCodeTokenizer.TT_INTEGER ) )
{
strValue += T._strValue;
}
else
{
getTokenizer().restoreToMark( mark );
return false;
}
}
else
{
strValue += T._strValue;
}
int end = strValue.length() - 1;
char suffix = ' ';
if ( !Character.isDigit(strValue.charAt(end)) )
{
suffix = strValue.charAt(end);
strValue = strValue.substring(0, end);
}
try
{
BigDecimal bigNum = new BigDecimal( strValue );
strValue = bigNum.toPlainString();
if (bigNum.scale() <= 0)
{
strValue += ".0";
}
}
catch( NumberFormatException e )
{
getTokenizer().restoreToMark( mark );
return false;
}
if( suffix != ' ')
{
strValue += suffix;
}
}
}
parseNumericValue( strValue );
return true;
}
else if( '.' == token.getType() )
{
getTokenizer().nextToken();
String strValue = (negated ? "-" : "") + ".";
if( match( T, SourceCodeTokenizer.TT_INTEGER ) )
{
strValue += T._strValue;
parseNumericValue( strValue );
}
else
{
pushErrorNumberLiteral( Res.MSG_EXPECTING_NUMBER_TO_FOLLOW_DECIMAL );
}
return true;
}
return false;
}
private void parseNumericValue( String strValue )
{
if( isPrefixNumericLiteral( strValue ) && strValue.indexOf( '.' ) != -1 )
{
pushErrorNumberLiteral( Res.MSG_IMPROPER_VALUE_FOR_NUMERIC_TYPE, strValue, JavaTypes.pINT() );
}
else
{
IType numericTypeFrom = getNumericTypeFrom( strValue );
if( numericTypeFrom != null )
{
parseExplicitlyTypedNumericLiteral( strValue, numericTypeFrom );
}
else
{
IType ctxType = getNumberTypeFromContextType( getContextType().isMethodScoring() ? null : getContextType().getType() );
NumericLiteral e;
if( strValue.indexOf( '.' ) != -1 )
{
if( ctxType == JavaTypes.BIG_DECIMAL() )
{
e = new NumericLiteral( strValue, new BigDecimal( strValue ), JavaTypes.BIG_DECIMAL() );
}
else if( ctxType == JavaTypes.RATIONAL() )
{
e = new NumericLiteral( strValue, Rational.get( strValue ), JavaTypes.RATIONAL() );
}
else
{
if( ctxType == JavaTypes.pFLOAT() )
{
e = parseFloat( strValue );
}
else if( ctxType == JavaTypes.pDOUBLE() )
{
e = parseDouble( strValue );
}
else
{
e = parseDoubleOrBigDec( strValue );
}
}
}
else
{
if( ctxType == JavaTypes.BIG_INTEGER() )
{
if( isPrefixNumericLiteral(strValue))
{
strValue = stripPrefix( strValue );
}
e = new NumericLiteral( strValue, new BigInteger( strValue ), JavaTypes.BIG_INTEGER());
}
else if( ctxType == JavaTypes.RATIONAL() )
{
e = new NumericLiteral( strValue, Rational.get( new BigDecimal( strValue ) ), JavaTypes.RATIONAL() );
}
else
{
try
{
if( !strValue.startsWith( "0" ) )
{
if( ctxType == JavaTypes.pFLOAT() )
{
e = parseFloat( strValue );
}
else if( ctxType == JavaTypes.pDOUBLE() )
{
e = parseDouble( strValue );
}
else
{
e = parseIntOrLongOrBigInt( strValue );
}
}
else
{
e = parseIntOrLongOrBigInt( strValue );
}
}
catch( NumberFormatException ex )
{
pushErrorNumberLiteral(Res.MSG_IMPROPER_VALUE_FOR_NUMERIC_TYPE, strValue, JavaTypes.pINT());
return;
}
}
}
pushExpression( e );
}
}
}
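// Un-suffixed integral literal: parsed as int, widening to long and then BigInteger when the
// value does not fit; a 0x/0b prefix selects radix 16 or 2.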
private NumericLiteral parseIntOrLongOrBigInt( String strValue )
{
NumericLiteral e;
int radix = 10;
String strippedValue = strValue;
if( isPrefixNumericLiteral( strValue ) )
{
strippedValue = stripPrefix( strValue );
if( isHexLiteral( strValue ) )
{
radix = 16;
}
else if ( isBinLiteral( strValue ) )
{
radix = 2;
}
}
try
{
e = new NumericLiteral( strValue, Integer.parseInt( strippedValue, radix), JavaTypes.pINT() );
}
catch( NumberFormatException nfe )
{
try
{
e = new NumericLiteral( strValue, Long.parseLong( strippedValue, radix ), JavaTypes.pLONG() );
}
catch( NumberFormatException nfe2 )
{
e = new NumericLiteral( strValue, new BigInteger( strippedValue, radix ), JavaTypes.BIG_INTEGER() );
}
}
return e;
}
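// Strips the two-character radix prefix ("0x"/"0X"/"0b"/"0B"), keeping a leading '-' intact.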
private String stripPrefix( String strValue ) {
String strippedValue;
if( strValue.startsWith( "-" ) )
{
strippedValue = "-" + strValue.substring( 3 );
}
else
{
strippedValue = strValue.substring( 2 );
}
return strippedValue;
}
private NumericLiteral parseDoubleOrBigDec( String strValue )
{
double dValue = Double.parseDouble( strValue );
if( dValue == Double.POSITIVE_INFINITY || dValue == Double.NEGATIVE_INFINITY )
{
return new NumericLiteral( strValue, new BigDecimal( strValue ), JavaTypes.BIG_DECIMAL() );
}
else
{
return new NumericLiteral( strValue, dValue, JavaTypes.pDOUBLE() );
}
}
private NumericLiteral parseFloat( String strValue )
{
float fValue = Float.parseFloat( strValue );
NumericLiteral floatLiteral = new NumericLiteral( strValue, fValue, JavaTypes.pFLOAT() );
verify( floatLiteral, fValue != Float.POSITIVE_INFINITY && fValue != Float.NEGATIVE_INFINITY, Res.MSG_NUMBER_LITERAL_TOO_LARGE );
return floatLiteral;
}
private NumericLiteral parseDouble( String strValue )
{
double dValue = Double.parseDouble( strValue );
NumericLiteral doubleLiteral = new NumericLiteral( strValue, dValue, JavaTypes.pDOUBLE() );
verify( doubleLiteral, dValue != Double.POSITIVE_INFINITY && dValue != Double.NEGATIVE_INFINITY, Res.MSG_NUMBER_LITERAL_TOO_LARGE );
return doubleLiteral;
}
private void parseExplicitlyTypedNumericLiteral( String strValue, IType numericTypeFrom )
{
if( isPrefixNumericLiteral( strValue ) )
{
parsePrefixNumericLiteral( strValue, numericTypeFrom );
}
else
{
parsePostfixNumericLiteral(strValue, numericTypeFrom, 10);
}
}
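// True for radix-prefixed literals such as 0xFF or 0b1010 (illustrative). Bare "0b", "0bi" and
// "0bd" are excluded because those are zero with a byte/BigInteger/BigDecimal suffix rather
// than a binary prefix.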
private boolean isPrefixNumericLiteral( String strValue )
{
return !(strValue.equalsIgnoreCase( "0b" ) || strValue.equalsIgnoreCase( "0bi" )|| strValue.equalsIgnoreCase( "0bd" ) ||
strValue.equalsIgnoreCase( "-0b" ) || strValue.equalsIgnoreCase( "-0bi" )|| strValue.equalsIgnoreCase( "-0bd" )) &&
(strValue.startsWith( "0x" ) || strValue.startsWith( "0X" ) ||
strValue.startsWith( "0b" ) || strValue.startsWith( "0B" ) ||
strValue.startsWith( "-0x" ) || strValue.startsWith( "-0X" ) ||
strValue.startsWith( "-0b" ) || strValue.startsWith( "-0B" ));
}
private void parsePrefixNumericLiteral( String strValue, IType numericTypeFrom )
{
int radix = 10;
String strippedValue = stripPrefix( strValue );
if( isHexLiteral( strValue ) )
{
radix = 16;
}
else if ( isBinLiteral( strValue ) )
{
radix = 2;
}
parsePostfixNumericLiteral( strippedValue, numericTypeFrom, radix );
}
private boolean isHexLiteral(String num) {
num = num.toLowerCase();
return num.startsWith( "0x" ) ||
num.startsWith( "-0x" );
}
private boolean isBinLiteral(String num) {
num = num.toLowerCase();
boolean hasPrefix = num.startsWith("0b") ||
num.startsWith("-0b");
int b = num.indexOf("b") + 1;
boolean hasDigit = ( b < num.length() ) && Character.isDigit( num.charAt( b ) );
return hasPrefix && hasDigit;
}
private void parsePostfixNumericLiteral( String num, IType numericTypeFrom, int radix )
{
String strValue = maybeStripTypeModifier( num, numericTypeFrom );
try
{
NumericLiteral e;
if( JavaTypes.pBYTE().equals( numericTypeFrom ) )
{
e = new NumericLiteral( strValue, Byte.parseByte( strValue, radix ), JavaTypes.pBYTE() );
}
else if( JavaTypes.pSHORT().equals( numericTypeFrom ) )
{
e = new NumericLiteral( strValue, Short.parseShort( strValue, radix ), JavaTypes.pSHORT() );
}
else if( JavaTypes.pINT().equals( numericTypeFrom ) )
{
e = new NumericLiteral( strValue, Integer.parseInt( strValue, radix ), JavaTypes.pINT() );
}
else if( JavaTypes.pLONG().equals( numericTypeFrom ) )
{
e = new NumericLiteral( strValue, Long.parseLong( strValue, radix ), JavaTypes.pLONG() );
}
else if( JavaTypes.pFLOAT().equals( numericTypeFrom ) )
{
float value = Float.parseFloat( strValue );
e = new NumericLiteral( strValue, value, JavaTypes.pFLOAT() );
verify( e, !Float.isInfinite( value ), Res.MSG_NUMBER_LITERAL_TOO_LARGE );
}
else if( JavaTypes.pDOUBLE().equals( numericTypeFrom ) )
{
double value = Double.parseDouble( strValue );
e = new NumericLiteral( strValue, value, JavaTypes.pDOUBLE() );
verify( e, !Double.isInfinite( value ), Res.MSG_NUMBER_LITERAL_TOO_LARGE );
}
else if( JavaTypes.BIG_INTEGER().equals( numericTypeFrom ) )
{
e = new NumericLiteral( strValue, new BigInteger( strValue , radix), JavaTypes.BIG_INTEGER() );
}
else if( JavaTypes.BIG_DECIMAL().equals( numericTypeFrom ) )
{
e = new NumericLiteral( strValue, new BigDecimal( strValue ), JavaTypes.BIG_DECIMAL() );
}
else if( JavaTypes.RATIONAL().equals( numericTypeFrom ) )
{
e = new NumericLiteral( strValue, Rational.get( strValue ), JavaTypes.RATIONAL() );
}
else
{
throw new IllegalStateException( "Do not know how to parse a numeric type of value " + numericTypeFrom );
}
if( hasTypeModifier( num ) )
{
e.setExplicitlyTyped( true );
}
pushExpression( e );
}
catch( NumberFormatException ex )
{
pushErrorNumberLiteral( Res.MSG_IMPROPER_VALUE_FOR_NUMERIC_TYPE, strValue, numericTypeFrom.getName() );
}
}
private String maybeStripTypeModifier( String strValue, IType numericTypeFrom )
{
if( hasTypeModifier( strValue ) )
{
int modifierLen = JavaTypes.BIG_DECIMAL().equals( numericTypeFrom ) || JavaTypes.BIG_INTEGER().equals( numericTypeFrom ) ? 2 : 1;
strValue = strValue.substring( 0, strValue.length() - modifierLen );
}
return strValue;
}
private boolean hasTypeModifier( String strValue )
{
boolean hex = isHexLiteral( strValue );
char ch = strValue.toLowerCase().charAt( strValue.length() - 1 );
if( hex && ( ch == 's' ) || ( ch == 'l' ) )
{
return true;
}
else if ( !hex && !Character.isDigit(ch) )
{
return true;
}
return false;
}
private void pushErrorNumberLiteral( ResourceKey key, Object... args )
{
NumericLiteral error = new NumericLiteral( "0", 0, JavaTypes.pINT() );
addError( error, key, args );
pushExpression( error );
}
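// Maps an explicit literal suffix to its numeric type, e.g. (illustrative):
//   42b -> byte, 42s -> short, 42L -> long, 1.5f -> float, 1.5d -> double,
//   42bi -> BigInteger, 1.5bd -> BigDecimal, 42r -> Rational.
// For 0x-prefixed literals, suffixes that are themselves hex digits (b, d, f, bi, bd) are not
// treated as type suffixes.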
private IType getNumericTypeFrom( String strValue )
{
boolean hex = isHexLiteral( strValue );
boolean bin = isBinLiteral( strValue );
if( !hex && (strValue.endsWith( "b" ) || strValue.endsWith( "B" )) )
{
return JavaTypes.pBYTE();
}
else if( strValue.endsWith( "s" ) || strValue.endsWith( "S" ) )
{
return JavaTypes.pSHORT();
}
else if( strValue.endsWith( "l" ) || strValue.endsWith( "L" ) )
{
return JavaTypes.pLONG();
}
else if( !hex && !bin && (strValue.endsWith( "f" ) || strValue.endsWith( "F" )) )
{
return JavaTypes.pFLOAT();
}
else if( !hex && (strValue.endsWith( "bi" ) || strValue.endsWith( "BI" )) )
{
return JavaTypes.BIG_INTEGER();
}
else if( !hex && !bin && (strValue.endsWith( "bd" ) || strValue.endsWith( "BD" )) )
{
return JavaTypes.BIG_DECIMAL();
}
else if( !hex && !bin && (strValue.endsWith( "d" ) || strValue.endsWith( "D" )) )
{
return JavaTypes.pDOUBLE();
}
else if( !hex && !bin && (strValue.endsWith( "r" ) || strValue.endsWith( "R" )) )
{
return JavaTypes.RATIONAL();
}
else
{
return null;
}
}
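// Single-quoted literal: normally a CharLiteral, but in a byte/Byte context a value within
// byte range becomes a byte NumericLiteral; multi-character contents are handled as a string
// literal.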
private boolean parseCharLiteral( Token token )
{
if( '\'' == token.getType() )
{
getTokenizer().nextToken();
if( token._strValue.length() != 1 )
{
_parseStringLiteral( token._bUnterminated, token );
}
else
{
char c = token._strValue.charAt( 0 );
IType ctxType = getContextType().getType();
Expression e;
if( !getContextType().isMethodScoring() &&
c >= 0 && c <= Byte.MAX_VALUE &&
(ctxType == JavaTypes.pBYTE() || ctxType == JavaTypes.BYTE()) )
{
e = new NumericLiteral( token._strValue, (byte)c, ctxType );
}
else
{
e = new CharLiteral( c );
}
verify( e, token.getInvalidCharPos() < 0, Res.MSG_INVALID_CHAR_AT, token.getInvalidCharPos() );
verify( e, !token._bUnterminated, Res.MSG_UNTERMINATED_STRING_LITERAL );
pushExpression( e );
}
return true;
}
return false;
}
private boolean parseStringLiteralSeparately()
{
final Token token = getTokenizer().getCurrentToken();
int iOffset = token.getTokenStart();
int iLineNum = token.getLine();
int iColumn = token.getTokenColumn();
if( parseStringLiteral( token ) )
{
setLocation( iOffset, iLineNum, iColumn );
return true;
}
return false;
}
private boolean parseStringLiteral( Token token )
{
if( '"' == token.getType() )
{
getTokenizer().nextToken();
_parseStringLiteral( token._bUnterminated, token );
return true;
}
return false;
}
private void _parseStringLiteral( boolean bUnterminatedLiteral, Token t )
{
Expression e;
if( t._strValue.contains( TemplateGenerator.SCRIPTLET_BEGIN ) || t._strValue.contains( TemplateGenerator.ALTERNATE_EXPRESSION_BEGIN ) )
{
e = parseTemplatizedStringLiteral( t );
}
else
{
String strValue = t._strValue;
if( strValue.length() > 0 && strValue.charAt( 0 ) == TemplateGenerator.ESCAPED_SCRIPTLET_MARKER )
{
strValue = strValue.substring( 1 )
.replace( TemplateGenerator.ESCAPED_SCRIPTLET_BEGIN_CHAR, '<' )
.replace( TemplateGenerator.ESCAPED_ALTERNATE_EXPRESSION_BEGIN_CHAR, '$' );
}
e = new StringLiteral( strValue );
}
if( bUnterminatedLiteral )
{
e.addParseException(new ParseException(makeFullParserState(), Res.MSG_UNTERMINATED_STRING_LITERAL));
}
verify( e, t.getInvalidCharPos() < 0, Res.MSG_INVALID_CHAR_AT, t.getInvalidCharPos() );
pushExpression( e );
}
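// Parses a string literal containing template constructs (scriptlets or '$'-prefixed
// expressions, e.g. "Hello ${name}" -- illustrative) with a nested parser so locations and
// parse issues line up with the enclosing source (offsets are shifted past the opening quote).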
private TemplateStringLiteral parseTemplatizedStringLiteral( Token t )
{
TemplateGenerator template = TemplateGenerator.getTemplate( new StringReader( t._strValue ) );
template.setContextInferenceManager( _ctxInferenceMgr );
template.setForStringLiteral( true );
TemplateStringLiteral e = new TemplateStringLiteral( template );
GosuParser parser = (GosuParser)GosuParserFactory.createParser( _symTable, ScriptabilityModifiers.SCRIPTABLE );
IScriptPartId scriptPart = getScriptPart();
parser.pushScriptPart( scriptPart );
try
{
parser.setEditorParser( isEditorParser() );
parser._ctxInferenceMgr = _ctxInferenceMgr;
template.setUseStudioEditorParser( isEditorParser() );
copyBlockStackTo( parser );
try
{
template.verify( parser, _dfsDeclByName, _typeUsesMap );
}
catch( ParseResultsException exc )
{
// errors are already where they occurred
}
boolean hasIssues = false;
// adjust for open quote
for( IParseTree location : parser.getLocations() )
{
((ParseTree)location).adjustOffset( 1, 0, 0 );
for( IParseIssue parseIssue : location.getParsedElement().getParseIssues() )
{
((ParseIssue)parseIssue).setStateSource( _tokenizer.getSource() );
hasIssues = true;
}
}
setSubTree( parser.getLocations() );
try
{
template.compile( _scriptPartIdStack, _symTable instanceof ThreadSafeSymbolTable ? _symTable : _symTable.copy(), _dfsDeclByName, _typeUsesMap, _blocks, _ctxInferenceMgr );
}
catch( TemplateParseException exc )
{
if( !hasIssues &&
exc.getParseException() != null &&
!(getScriptPart().getContainingType() instanceof IGosuFragment) )
{
List<IParseIssue> parseExceptions = exc.getParseException().getParseExceptions();
for( IParseIssue p : parseExceptions )
{
e.addParseException( p );
}
}
}
return e;
}
finally
{
parser.popScriptPart( scriptPart );
}
}
// type-literal
// name[\<<type-parameter-list>\>][\[\]]
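//   e.g. (illustrative): String, List<String>, int[], Map<String, Integer>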
public boolean parseTypeLiteral()
{
return parseTypeLiteral( getTokenizer().getCurrentToken(), false );
}
public boolean parseTypeLiteral( Token token )
{
return parseTypeLiteral( token, false );
}
boolean parseTypeLiteral( boolean bInterface )
{
return parseTypeLiteral( getTokenizer().getCurrentToken(), bInterface );
}
boolean parseTypeLiteral( Token token, boolean bInterface )
{
boolean bNoContextType = getContextType().getType() == null;
if( bNoContextType )
{
// Hint that we are trying to parse a type literal (during indirect member parsing)
pushInferredContextTypes( new ContextType( MetaType.DEFAULT_TYPE_TYPE.get() ) );
}
try
{
int iOffset = token.getTokenStart();
int iLineNum = token.getLine();
int iColumn = token.getTokenColumn();
boolean bSuccess = _parseTypeLiteralWithAggregateSyntax( token, false, bInterface );
if( bSuccess )
{
Expression e = peekExpression();
if( e instanceof TypeLiteral )
{
IType monitorLockType = GosuTypes.IMONITORLOCK();
verify( e,
monitorLockType == null ||
!monitorLockType.equals( ((TypeLiteral)e).getType().getType() ),
Res.MSG_IMONITOR_LOCK_SHOULD_ONLY_BE_USED_WITHIN_USING_STMTS );
}
setLocation( iOffset, iLineNum, iColumn, true );
}
return bSuccess;
}
finally
{
if( bNoContextType )
{
popInferredContextTypes();
}
}
}
void parseTypeLiteralForNewExpression()
{
Token token = getTokenizer().getCurrentToken();
int iOffset = token.getTokenStart();
int iLineNum = token.getLine();
int iColumn = token.getTokenColumn();
setParsingTypeLiteralForNewExpression( true );
try
{
if( _parseTypeLiteralWithAggregateSyntax( token, true, false ) )
{
setLocation( iOffset, iLineNum, iColumn );
}
}
finally
{
setParsingTypeLiteralForNewExpression( false );
}
}
boolean _parseTypeLiteralWithAggregateSyntax( Token token, boolean bIgnoreArrayBrackets, boolean bInterface )
{
boolean bRet = _parseTypeLiteral( token, bIgnoreArrayBrackets, bInterface );
token = getTokenizer().getCurrentToken();
if( SourceCodeTokenizer.TT_OPERATOR == token.getType() && "&".equals( token.getStringValue() ) )
{
parseAggregateTypeLiteral( bInterface );
}
return bRet;
}
boolean _parseTypeLiteral( Token token, boolean bIgnoreArrayBrackets, boolean bInterface )
{
int iOffset = token.getTokenStart();
int iLineNum = token.getLine();
int iColumn = token.getTokenColumn();
if( SourceCodeTokenizer.TT_KEYWORD == token.getType() && Keyword.KW_block == token.getKeyword() )
{
getTokenizer().nextToken();
_parseBlockLiteral();
setLocation( iOffset, iLineNum, iColumn, true );
}
else
{
if( isWordOrValueKeyword( token ) || matchPrimitiveType( false ) )
{
getTokenizer().nextToken();
}
else
{
TypeLiteral typeLiteral = bInterface ? new InterfaceTypeLiteral( ErrorType.getInstance() ) : new TypeLiteral( ErrorType.getInstance() );
verify( typeLiteral, false, Res.MSG_EXPECTING_TYPE_NAME );
pushExpression( typeLiteral );
Token priorT = getTokenizer().getPriorToken();
setLocation( priorT.getTokenEnd(), priorT.getLine(), priorT.getTokenColumn(), true, true );
return false;
}
parseTypeLiteral( new String[]{token.getStringValue()}, bIgnoreArrayBrackets, bInterface, iOffset, iLineNum, iColumn );
}
return true;
}
boolean matchPrimitiveType( boolean bSuperThis ) {
Token token = getTokenizer().getCurrentToken();
if( token.getType() == SourceCodeTokenizer.TT_KEYWORD )
{
boolean bMatch =
Keyword.KW_void == token.getKeyword() ||
Keyword.KW_boolean == token.getKeyword() ||
Keyword.KW_char == token.getKeyword() ||
Keyword.KW_byte == token.getKeyword() ||
Keyword.KW_short == token.getKeyword() ||
Keyword.KW_int == token.getKeyword() ||
Keyword.KW_long == token.getKeyword() ||
Keyword.KW_float == token.getKeyword() ||
Keyword.KW_double == token.getKeyword() ||
(bSuperThis &&
(Keyword.KW_this == token.getKeyword() ||
Keyword.KW_super == token.getKeyword()));
if( bMatch )
{
return true;
}
}
return false;
}
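// Compound (aggregate) type joined with '&', e.g. (illustrative) Comparable & Serializable.
// At most one class component is allowed, the rest must be interfaces, and duplicate or
// redundant (mutually assignable) components are flagged.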
private void parseAggregateTypeLiteral( boolean bInterface )
{
CompoundTypeLiteral typeLiteral = new CompoundTypeLiteral();
List<IType> types = new ArrayList<>();
TypeLiteral typeLiteralComponent = (TypeLiteral) peekExpression();
while( true )
{
addToCompoundType( types );
if( !match( null, "&", SourceCodeTokenizer.TT_OPERATOR ) )
{
break;
}
_parseTypeLiteral( getTokenizer().getCurrentToken(), true, bInterface );
}
verify( typeLiteral, types.size() > 1, Res.MSG_AGGREGATES_MUST_CONTAIN_MORE );
verify( typeLiteral, !(typeLiteralComponent.getType().getType() instanceof TypeVariableType), Res.MSG_ONLY_ONE_TYPE_VARIABLE );
typeLiteral.setType( CompoundType.get( new HashSet<>( types ) ) );
parseArrayType( typeLiteral );
pushExpression( typeLiteral );
}
private void addToCompoundType( List<IType> types )
{
TypeLiteral typeLiteralComponent = (TypeLiteral)popExpression();
IType t = typeLiteralComponent.getType().getType();
verify( typeLiteralComponent, t != JavaTypes.pVOID(), Res.MSG_VOID_NOT_ALLOWED );
Set<IType> componentTypes = t.isCompoundType() ? t.getCompoundTypeComponents() : Collections.singleton(t);
for( IType componentType : componentTypes )
{
boolean bFoundClassAlready = false;
for( IType csr : types )
{
if( !(csr instanceof ErrorType) )
{
if( verify( typeLiteralComponent, csr != componentType, Res.MSG_ALREADY_CONTAINS_TYPE, componentType ) )
{
verify( typeLiteralComponent, !(csr.isAssignableFrom( componentType ) || StandardCoercionManager.isStructurallyAssignable( csr, componentType )),Res.MSG_INTERFACE_REDUNDANT, csr, componentType );
verify( typeLiteralComponent, !(componentType.isAssignableFrom( csr ) || StandardCoercionManager.isStructurallyAssignable( componentType, csr )), Res.MSG_INTERFACE_REDUNDANT, componentType, csr );
}
}
if( !csr.isInterface() )
{
bFoundClassAlready = true;
}
verify( typeLiteralComponent, componentType.isInterface() || !bFoundClassAlready, Res.MSG_ONLY_ONE_CLASS_IN_COMPONENT_TYPE );
}
verify( typeLiteralComponent, !componentType.isArray(), Res.MSG_NO_ARRAY_IN_COMPONENT_TYPE );
verify( typeLiteralComponent, !componentType.isPrimitive(), Res.MSG_NO_PRIMITIVE_IN_COMPONENT_TYPE );
types.add( componentType );
}
}
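// Block (closure) type literal, e.g. (illustrative):
//   block( name: String, count: int = 1 ): boolean
// Argument names are optional but warned about when missing, default values must be
// compile-time constants, and the return type defaults to void when omitted.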
void parseBlockLiteral() {
int iOffset = getTokenizer().getTokenStart();
int iLineNum = getTokenizer().getLineNumber();
int iColumn = getTokenizer().getTokenColumn();
_parseBlockLiteral();
setLocation( iOffset, iLineNum, iColumn );
}
void _parseBlockLiteral()
{
BlockLiteral literal = new BlockLiteral();
verify( literal, match( null, '(' ), Res.MSG_EXPECTING_LEFTPAREN_BLOCK );
ArrayList<IType> argTypes = new ArrayList<>();
ArrayList<String> argNames = new ArrayList<>();
ArrayList<IExpression> defValues = new ArrayList<>();
if( !match( null, ')' ) )
{
do
{
String result;
int state = _tokenizer.mark();
Token t = new Token();
boolean bEquals = false;
Expression defExpr = null;
TypeLiteral blockLiteral = null;
if( match( t, SourceCodeTokenizer.TT_WORD ) )
{
if( match( null, ":", SourceCodeTokenizer.TT_OPERATOR ) )
{
result = t._strValue;
}
else if( bEquals = match( null, "=", SourceCodeTokenizer.TT_OPERATOR ) )
{
result = t._strValue;
parseExpression();
defExpr = popExpression();
}
else if( match( null, null, '(', true ) )
{
result = t._strValue;
parseBlockLiteral();
blockLiteral = (TypeLiteral) popExpression();
}
else
{
_tokenizer.restoreToMark( state );
result = null;
}
}
else
{
_tokenizer.restoreToMark( state );
result = null;
}
String name = result;
TypeLiteral typeLiteral = null;
if( !bEquals )
{
if ( blockLiteral == null )
{
parseTypeLiteral();
typeLiteral = (TypeLiteral)popExpression();
if( match( null, "=", SourceCodeTokenizer.TT_OPERATOR ) )
{
parseExpression( new ContextType( typeLiteral.getType().getType(), false ) );
defExpr = popExpression();
}
}
else
{
typeLiteral = blockLiteral;
}
argTypes.add( typeLiteral.getType().getType() );
verifyOrWarn( typeLiteral, name != null, true, Res.MSG_BLOCK_TYPES_SHOULD_HAVE_ARG_NAMES );
verify( typeLiteral, typeLiteral.getType().getType() != JavaTypes.pVOID(), Res.MSG_VOID_NOT_ALLOWED );
}
else
{
argTypes.add( defExpr.getType() );
verifyOrWarn( literal, name != null, true, Res.MSG_BLOCK_TYPES_SHOULD_HAVE_ARG_NAMES );
verify( literal, defExpr.getType() != JavaTypes.pVOID(), Res.MSG_VOID_NOT_ALLOWED );
}
if( defExpr != null )
{
if( verify( defExpr, defExpr.isCompileTimeConstant(), Res.MSG_COMPILE_TIME_CONSTANT_REQUIRED ) )
{
defValues.add( defExpr );
}
}
else
{
defValues.add( null );
}
if( name != null && argNames.contains( name ) )
{
verify( typeLiteral == null ? literal : typeLiteral, false, Res.MSG_VARIABLE_ALREADY_DEFINED, name );
}
argNames.add( name == null ? "" : name );
} while( match( null, ',' ) );
verify( literal, match( null, ')' ), Res.MSG_EXPECTING_RIGHTPAREN_BLOCK );
}
argNames.trimToSize();
argTypes.trimToSize();
literal.setArgTypes( argTypes );
literal.setArgNames( argNames );
literal.setDefValueExpressions( defValues );
TypeLiteral returnType;
if( match( null, ":", SourceCodeTokenizer.TT_OPERATOR ) )
{
parseTypeLiteral();
returnType = (TypeLiteral)popExpression();
}
else
{
returnType = new TypeLiteral( JavaTypes.pVOID() );
returnType.setType( JavaTypes.pVOID() );
}
verify( literal, argNames.size() <= IBlock.MAX_ARGS, Res.MSG_BLOCKS_CAN_HAVE_A_MOST_SIXTEEN_ARGS );
literal.setReturnType( returnType );
pushExpression( literal );
}
void parseTypeLiteral( String[] T, boolean bIgnoreArrayBrackets, boolean bInterface, int iOffset, int iLineNum, int iColumn )
{
parseCompoundTypeLiteralExpression( T, bInterface, iOffset, iLineNum, iColumn );
Expression expr = popExpression();
if( expr instanceof Identifier && !(expr.getType() instanceof ErrorType) )
{
//!! todo: this is a hack so we can resolve types with the same name as identifiers (case insensitively)
TypeLiteral e = bInterface ? new InterfaceTypeLiteral( MetaType.getLiteral( expr.getType() ) ) : new TypeLiteral( MetaType.getLiteral( expr.getType() ) );
e.setPackageExpression( expr );
expr = e;
}
else if( !(expr instanceof TypeLiteral) )
{
TypeLiteral e = bInterface ? new InterfaceTypeLiteral( ErrorType.getInstance() ) : new TypeLiteral( ErrorType.getInstance() );
e.setPackageExpression( expr );
e.addParseException( new ParseException( null, Res.MSG_EXPECTING_TYPE_NAME ) );
expr = e;
}
IType type = ((TypeLiteral)expr).getType().getType();
verifyTypeAccessible( (TypeLiteral)expr, type );
T[0] = type.getName();
resolveArrayOrParameterizationPartOfTypeLiteral( T, bIgnoreArrayBrackets, (TypeLiteral)expr );
}
private void verifyTypeAccessible( TypeLiteral expr, IType type )
{
ICompilableType gsClass = getGosuClass();
if( gsClass == null || Modifier.isPublic( type.getModifiers() ) )
{
return;
}
IRelativeTypeInfo.Accessibility acc = FeatureManager.getAccessibilityForClass( type, gsClass );
if( Modifier.isPrivate( type.getModifiers() ) )
{
verify( expr,
acc == IRelativeTypeInfo.Accessibility.PRIVATE,
Res.MSG_TYPE_HAS_XXX_ACCESS, type.getName(), Keyword.KW_private.toString() );
}
else if( Modifier.isProtected( type.getModifiers() ) )
{
verify( expr,
acc == IRelativeTypeInfo.Accessibility.PROTECTED ||
acc == IRelativeTypeInfo.Accessibility.INTERNAL ||
acc == IRelativeTypeInfo.Accessibility.PRIVATE,
Res.MSG_TYPE_HAS_XXX_ACCESS, type.getName(), Keyword.KW_protected.toString() );
}
else if( /* package-protected (or internal) */ !Modifier.isPublic( type.getModifiers() ) &&
!Modifier.isProtected( type.getModifiers() ))
{
verify( expr,
acc == IRelativeTypeInfo.Accessibility.INTERNAL ||
acc == IRelativeTypeInfo.Accessibility.PRIVATE,
Res.MSG_TYPE_HAS_XXX_ACCESS, type.getName(), Keyword.KW_internal.toString() );
}
}
private void parseCompoundTypeLiteralExpression( String[] T, boolean bInterface, int iOffset, int iLineNum, int iColumn )
{
parseNamespaceStartOrRelativeType( T, bInterface );
Expression expr = peekExpression();
if( expr.hasParseExceptions() )
{
List<IParseIssue> exceptions = expr.getParseExceptions();
if( exceptions.size() != 1 ||
(exceptions.get( 0 ).getMessageKey() != Res.MSG_CANNOT_REFERENCE_CLASS_TYPE_VAR_IN_STATIC_CONTEXT &&
exceptions.get( 0 ).getMessageKey() != Res.MSG_TYPE_PARAM_NOT_ASSIGNABLE_TO &&
exceptions.get( 0 ).getMessageKey() != Res.MSG_EXPECTING_CLOSING_ANGLE_BRACKET_FOR_TYPE) )
{
ParseException pe = expr.removeParseException( null );// Res.MSG_BAD_IDENTIFIER_NAME );
pe.setMessage( Res.MSG_INVALID_TYPE, T[0] );
expr.addParseException( pe );
}
}
setLocation( iOffset, iLineNum, iColumn );
parseIndirectMemberAccess( iOffset, iLineNum, iColumn, true );
}
private boolean isTypeParameterErrorMsg( Expression expr, List<IParseIssue> exceptions )
{
return exceptions.get( 0 ).getMessageKey() == Res.MSG_TYPE_PARAM_NOT_ASSIGNABLE_TO && expr instanceof TypeLiteral && !isErrorType( ((TypeLiteral)expr).getType().getType() );
}
/**
* @return True if parsed parameterized type.
*/
private boolean resolveArrayOrParameterizationPartOfTypeLiteral( String[] T, boolean bIgnoreArrayBrackets, TypeLiteral e )
{
boolean bArrayOrParameterization = false;
if( !bIgnoreArrayBrackets )
{
bArrayOrParameterization = parseArrayType( e );
}
pushExpression( e );
if( !T[0].endsWith( "[]" ) )
{
int iOffset = _tokenizer.getTokenStart();
int iLineNum = _tokenizer.getLineNumber();
int iColumn = getTokenizer().getTokenColumn();
if( match( null, "<", SourceCodeTokenizer.TT_OPERATOR ) )
{
TypeLiteral typeLiteral = (TypeLiteral)peekExpression();
IType type = typeLiteral.getType().getType();
verify( e, type.isGenericType(), Res.MSG_PARAMETERIZATION_NOT_SUPPORTED_FOR_TYPE, type.getName() );
List<TypeLiteral> paramTypes = parseTypeParameters( type );
verify( e, match( null, ">", SourceCodeTokenizer.TT_OPERATOR ), Res.MSG_EXPECTING_CLOSING_ANGLE_BRACKET_FOR_TYPE );
makeTypeParameterListClause( iOffset, iLineNum, iColumn, paramTypes );
int numArrays = 0;
while( !bIgnoreArrayBrackets && match( null, '[' ) )
{
verify( e, match( null, ']' ), Res.MSG_EXPECTING_ARRAY_BRACKET );
++numArrays;
}
if( !(type instanceof ErrorType) )
{
IType[] types = new IType[paramTypes.size()];
for( int i = 0; i < paramTypes.size(); i++ )
{
TypeLiteral tl = paramTypes.get( i );
types[i] = (tl.getType()).getType();
}
verifyCanParameterizeType( e, type, types );
typeLiteral.setParameterTypes( types );
type = typeLiteral.getType().getType();
if( numArrays > 0 )
{
for( int i = 0; i < numArrays; i++ )
{
type = type.getArrayType();
}
typeLiteral.setType( MetaType.getLiteral( type ) );
}
}
bArrayOrParameterization = true;
}
else
{
TypeLiteral typeLiteral = (TypeLiteral)peekExpression();
IType type = typeLiteral.getType().getType();
try
{
if( type.isGenericType() && !type.isParameterizedType() && !isParsingCompileTimeConstantExpression() )
{
// If a generic type, assume the default parameterized version e.g., List => List<Object>.
// But if the type is assignable to the context type and the context type is parameterized,
// derive the parameters from the context type.
ContextType ctxType = getContextType();
type = TypeLord.deriveParameterizedTypeFromContext( type, ctxType.getUnboundType() != null ? ctxType.getUnboundType() : ctxType.getType() );
TypeVarToTypeMap inferenceMap = getInferenceMap();
if( inferenceMap != null )
{
type = TypeLord.getActualType( type, inferenceMap, isParsingTypeLiteralForNewExpression() );
}
typeLiteral.setType( MetaType.getLiteral( type ) );
}
}
catch( Exception ex )
{
throw GosuExceptionUtil.forceThrow( ex, type.getName() );
}
}
}
return bArrayOrParameterization;
}
private boolean isParsingCompileTimeConstantExpression() {
return getContextType() != null && getContextType().isCompileTimeConstant();
}
private boolean parseArrayType( TypeLiteral tl )
{
boolean bBalancedBrackets = true;
IType baseType = tl.getType().getType();
while( match( null, '[' ) )
{
bBalancedBrackets = match( null, ']' );
if( !bBalancedBrackets )
{
advanceToNextTokenSilently();
}
try
{
baseType = baseType.getArrayType();
}
catch( IllegalArgumentException iae )
{
tl.addParseException( Res.MSG_ARRAY_NOT_SUPPORTED, baseType.getName() );
baseType = ErrorType.getInstance();
}
}
if( baseType != tl.getType().getType() )
{
warn( tl, !tl.getType().getType().isParameterizedType() ||
TypeLord.getDefaultParameterizedType( tl.getType().getType() ) == tl.getType().getType(),
Res.MSG_PARAMETERIZED_ARRAY_COMPONENT );
tl.setType( MetaType.getLiteral( baseType ) );
verify( tl, bBalancedBrackets, Res.MSG_EXPECTING_ARRAY_BRACKET );
return true;
}
return false;
}
// type-parameter-list
// <type-parameter>
// <type-parameter-list> , <type-parameter>
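//   e.g. (illustrative): Map<String, List<Integer>>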
List<TypeLiteral> parseTypeParameters( IType enclosingType )
{
List<TypeLiteral> paramTypes = new ArrayList<>();
int i = 0;
do
{
IType boundingType = JavaTypes.OBJECT();
if( enclosingType != null && enclosingType.isGenericType() )
{
IGenericTypeVariable[] typeVars = enclosingType.getGenericTypeVariables();
if( typeVars != null && typeVars.length > i )
{
boundingType = typeVars[i].getBoundingType();
}
}
parseParameterType( boundingType );
paramTypes.add( (TypeLiteral)popExpression() );
}
while( match( null, ',' ) && ++i > 0 );
return paramTypes;
}
private void makeTypeParameterListClause( int iOffset, int iLineNum, int iColumn, List<TypeLiteral> paramTypes )
{
if( paramTypes.size() > 0 )
{
TypeParameterListClause e = new TypeParameterListClause( paramTypes.toArray( new ITypeLiteralExpression[paramTypes.size()] ) );
pushExpression( e );
boolean bZeroLength = _tokenizer.getTokenStart() == iOffset;
if( bZeroLength )
{
Token priorToken = getTokenizer().getPriorToken();
iOffset = priorToken.getTokenEnd();
iLineNum = priorToken.getLine();
iColumn = priorToken.getTokenColumn();
}
setLocation( iOffset, iLineNum, iColumn, true );
popExpression();
}
}
// type-parameter
// ? [extends <type-literal>]
// ? [super <type-literal>]
// <type-literal>
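//   e.g. (illustrative): List<? extends Number>, Collection<? super Integer>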
boolean parseParameterType( IType boundingType )
{
boolean isWildcard = false;
Expression superTypeLiteral = null;
int iOffset = _tokenizer.getTokenStart();
int iLineNum = _tokenizer.getLineNumber();
int iColumn = getTokenizer().getTokenColumn();
if( match( null, "?", SourceCodeTokenizer.TT_OPERATOR ) )
{
isWildcard = true;
if( match( null, Keyword.KW_extends ) )
{
if( match( null, "?", SourceCodeTokenizer.TT_OPERATOR, true ) )
{
// A strange, but necessary case. E.g.,
// List<E> has method: addAll( Collection<? extends E> c )
// when we have type List<?> we'll have addAll( Collection<? extends ?> c )
// which essentially needs to be just that - a Collection of something undefined
if( !parseParameterType( boundingType ) )
{
return false;
}
}
else
{
parseTypeLiteral();
}
}
else if( match( null, Keyword.KW_super ) )
{
if( match( null, "?", SourceCodeTokenizer.TT_OPERATOR, true ) )
{
// A strange, but necessary case. E.g.,
// List<E> has method: addAll( Collection<? super E> c )
// when we have type List<?> we'll have addAll( Collection<? super ?> c )
// which essentially needs to be just that - a Collection of something undefined
if( !parseParameterType( boundingType ) )
{
return false;
}
}
else
{
parseTypeLiteral();
superTypeLiteral = popExpression(); // Eat whatever type literal is here (we punt on contravariance)
TypeLiteral typeLiteral = new TypeLiteral( MetaType.getLiteral( boundingType ) );
pushExpression( typeLiteral );
}
}
else
{
pushExpression( new TypeLiteral( JavaTypes.OBJECT() ) );
setLocation( iOffset, iLineNum, iColumn );
}
}
else
{
parseTypeLiteral();
}
TypeLiteral tl = (TypeLiteral)peekExpression();
if( !isAllowingWildcards() )
{
if( superTypeLiteral != null )
{
verify(superTypeLiteral, !isWildcard, Res.MSG_NO_WILDCARDS, tl.getType().getType().getRelativeName() );
}
verify(tl, !isWildcard, Res.MSG_NO_WILDCARDS, tl.getType().getType().getRelativeName() );
}
boxTypeLiteralsType( tl );
return true;
}
private void boxTypeLiteralsType( TypeLiteral tl )
{
IType tlType = tl.getType().getType();
if( !warn( tl, !tlType.isPrimitive(), Res.MSG_PRIMITIVE_TYPE_PARAM, tlType.getName(), TypeSystem.getBoxType( tlType ) ) )
{
tl.setType( TypeSystem.getBoxType( tlType ) );
}
}
// statement
// namespace-statement
// package namespace-name
// uses-statement
// uses type-literal
// uses namespace-ref
// block
// { [statement-list] }
// statement-list
// <statement>
// <statement-list> <statement>
// * Left recursion removed is: *
// statement-list
// <statement> <statement-list2>
// statement-list2
// <statement>
// null
// assignment-statement
// <identifier> = <expression>
// <member-access> = <expression>
// <array-access> = <expression>
// method-call-statement
// <method-call-expression>
// argument-list
// <expression>
// <argument-list> , <expression>
// if-statement
// if ( <expression> ) <statement> [ else <statement> ]
// for...in-statement
// for ( <identifier> in <expression> [ index <identifier> ] ) <statement>
// while-statement
// while ( <expression> ) <statement>
//   do...while-statement
// do <statement> while ( <expression> )
// var-statement
// var <identifier> [ : <type-literal> ] = <expression>
// var <identifier> : <type-literal> [ = <expression> ]
// switch-statement
// switch ( <expression> ) { [switch-cases] [switch-default] }
// switch-cases
// <switch-case>
// <switch-cases> <switch-case>
// switch-case
// case <expression> : [statement-list]
// switch-default
// default : [statement-list]
// continue-statement
// continue
// break-statement
// break
// return-statement
// return <expression>
// return ;
// assert-statement
// assert <expression>
// assert <expression> : <expression>
// class-definition
// [modifiers] class <identifier> [extends <base-class>] [implements <interfaces-list>] { <class-members> }
// function-definition
// [modifiers] function <identifier> ( [ <argument-declaration-list> ] ) [ : <type-literal> ] <statement-block>
// argument-declaration-list
// <argument-declaration>
//     <argument-declaration-list> , <argument-declaration>
// argument-declaration
// <identifier> : <type-literal>
// try-catch-finally-statement
// try <statement> [ catch ( <identifier> ) <statement> ] [ finally <statement> ]
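// e.g. (illustrative): var count : int = 0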
boolean parseStatement()
{
return parseStatement( false );
}
boolean parseStatement( boolean bAsStmtBlock )
{
return parseStatement( false, bAsStmtBlock );
}
boolean parseStatement( boolean forceKeepStmtBlock, boolean bAsStmtBlock )
{
incStatementDepth();
try
{
int iOffset = _tokenizer.getTokenStart();
int iLineNum = _tokenizer.getLineNumber();
int iColumn = getTokenizer().getTokenColumn();
checkInstruction( true );
ParseTree prevLocation = peekLocation();
IParsedElement prevStmt =
prevLocation != null && prevLocation.getParsedElement() != null
? prevLocation.getParsedElement() instanceof Statement
? prevLocation.getParsedElement()
: null
: null;
boolean bRet;
boolean bSetLocation = true;
boolean bMatchedBrace = !bAsStmtBlock && match( null, '{' );
if( bMatchedBrace || bAsStmtBlock )
{
parseStatementBlock( forceKeepStmtBlock, bMatchedBrace || !bAsStmtBlock );
bRet = true;
bSetLocation = peekStatement() instanceof StatementList;
}
else
{
bRet = _parseStatement();
}
if( bRet && bSetLocation )
{
// Consume optional trailing semi as part of the statement
match( null, ';' );
setLocation( iOffset, iLineNum, iColumn, bAsStmtBlock );
ParsedElement currentStmt = peekStatement();
if( !(currentStmt instanceof NoOpStatement) && !(prevStmt instanceof NoOpStatement) )
{
warn( currentStmt,
prevStmt == null || prevStmt.getLineNum() != currentStmt.getLineNum()
|| prevStmt.hasParseExceptions() || currentStmt.hasParseExceptions()
|| (!ILanguageLevel.Util.STANDARD_GOSU() && hasSemicolon( prevStmt )),
Res.MSG_STATEMENT_ON_SAME_LINE );
}
}
return bRet;
}
finally
{
decStatementDepth();
}
}
private boolean hasSemicolon( IParsedElement stmt )
{
if( stmt == null )
{
return false;
}
IParseTree location = stmt.getLocation();
String stmtText = getTokenizer().getSource().substring( location.getOffset(), location.getExtent() + 1 );
return stmtText.length() > 0 && stmtText.charAt( stmtText.length()-1 ) == ';';
}
boolean parseLoopStatement()
{
_iBreakOk++;
_iContinueOk++;
try
{
return parseStatement();
// if( parseStatement() ) {
//   Statement stmt = peekStatement();
//   warn( stmt, doesLoopCycle( stmt ), Res.MSG_LOOP_DOESNT_LOOP );
//   return true;
// }
// return false;
}
finally
{
_iBreakOk--;
_iContinueOk--;
}
}
//## todo: doesn't handle this case (break is absolute with help of continue, but info about continue does not exist past the break)
//## probably best to change getLeastSignificantTerminal() to return something that provides more information, yet retains the same
//## terminal statement for normal unreachable code detection.
// do {
//   if( cond ) {
//     break
//   }
//   else {
//     continue
//   }
// } while( true )
// private boolean doesLoopCycle( Statement stmt )
// {
//   boolean[] bAbsolute = {false};
//   ITerminalStatement termStmt = stmt.getLeastSignificantTerminalStatement( bAbsolute );
//   if( termStmt == null || !bAbsolute[0] ) {
//     return true;
//   }
//   return termStmt.getTerminalType() == TerminalType.Continue;
// }
boolean _parseStatement()
{
Token token = getTokenizer().getCurrentToken();
final Keyword keyword = token.getKeyword();
if( areUsingStatementsAllowedInStatementLists() && Keyword.KW_uses == keyword )
{
_tokenizer.nextToken();
parseUsesStatement();
}
else if( Keyword.KW_if == keyword )
{
_tokenizer.nextToken();
parseIfStatement();
}
else if( Keyword.KW_try == keyword )
{
_tokenizer.nextToken();
parseTryCatchFinallyStatement();
}
else if( Keyword.KW_throw == keyword )
{
_tokenizer.nextToken();
parseThrowStatement();
}
else if( Keyword.KW_continue == keyword )
{
_tokenizer.nextToken();
ContinueStatement stmt = new ContinueStatement();
verify( stmt, _iContinueOk > 0, Res.MSG_CONTINUE_OUTSIDE_LOOP );
pushStatement( stmt );
}
else if( Keyword.KW_break == keyword )
{
_tokenizer.nextToken();
BreakStatement stmt = new BreakStatement();
verify( stmt, _iBreakOk > 0, Res.MSG_BREAK_OUTSIDE_SWITCH_OR_LOOP );
pushStatement( stmt );
}
else if( Keyword.KW_return == keyword )
{
_tokenizer.nextToken();
parseReturnStatement();
}
else if( Keyword.KW_foreach == keyword || Keyword.KW_for == keyword )
{
_tokenizer.nextToken();
parseForEachStatement();
}
else if( Keyword.KW_while == keyword )
{
_tokenizer.nextToken();
parseWhileStatement();
}
else if( Keyword.KW_do == keyword )
{
_tokenizer.nextToken();
parseDoWhileStatement();
}
else if( Keyword.KW_switch == keyword )
{
_tokenizer.nextToken();
parseSwitchStatement();
}
else if( Keyword.KW_using == keyword )
{
_tokenizer.nextToken();
parseUsingStatement();
}
else if( Keyword.KW_assert == keyword )
{
_tokenizer.nextToken();
parseAssertStatement();
}
else if( Keyword.KW_final == keyword )
{
_tokenizer.nextToken();
VarStatement varStmt = new VarStatement();
varStmt.setModifierInfo( new ModifierInfo(0) );
varStmt.setFinal( true );
parseLocalVarStatement( varStmt );
}
else if( Keyword.KW_var == keyword )
{
VarStatement varStmt = new VarStatement();
parseLocalVarStatement( varStmt );
}
else if( ';' == token.getType() )
{
_tokenizer.nextToken();
pushStatement( new NoOpStatement() );
}
else if( getGosuClass() instanceof IGosuProgram &&
getStatementDepth() == 1 &&
!isParsingBlock() &&
!((IGosuProgramInternal)getGosuClass()).isStatementsOnly() &&
(maybeAdvanceTokenizerToEndOfSavedLocation()) )
{
pushStatement( new NoOpStatement() );
}
else if( !isParsingFunction() && parseFunctionDefinition() )
{
return true;
}
else if( !isParsingFunction() && parsePropertyDefinition() )
{
return true;
}
else if( Keyword.KW_eval == keyword )
{
int iOffset = token.getTokenStart();
int iLineNum = token.getLine();
int iColumn = token.getTokenColumn();
_tokenizer.nextToken();
parseEvalExpression();
setLocation( iOffset, iLineNum, iColumn, true );
pushStatement( new EvalStatement( (EvalExpression)popExpression() ) );
}
else if( !parseAssignmentOrMethodCall() )
{
if( SourceCodeTokenizer.TT_EOF != token.getType() )
{
int iOffset = token.getTokenStart();
int iLineNum = token.getLine();
int iColumn = token.getTokenColumn();
if( '}' != token.getType() &&
';' != token.getType() )
{
if( isParsingFunction() )
{
if( maybeAdvanceTokenizerToEndOfSavedLocation() )
{
pushStatement( new NoOpStatement() );
setLocation( iOffset, iLineNum, iColumn, true, true );
popStatement();
return false;
}
else if( Keyword.KW_construct == token.getKeyword() ||
Keyword.KW_function == token.getKeyword() ||
Keyword.KW_property == token.getKeyword() )
{
Statement noop = new NoOpStatement();
getTokenizer().nextToken();
eatStatementBlock( noop, Res.MSG_SYNTAX_ERROR );
pushStatement( noop );
setLocation( iOffset, iLineNum, iColumn, true, true );
popStatement();
return false;
}
}
String str = _tokenizer.getTokenAsString();
TypeLiteral type = resolveTypeLiteral( str );
_tokenizer.nextToken();
Statement noop = new NoOpStatement();
if( !(type.evaluate() instanceof ErrorType) )
{
int state = _tokenizer.mark();
int iLocationsCount = _locations.size();
parsePrimaryExpression();
Expression expression = popExpression();
if( expression instanceof Identifier && match( null, "=", SourceCodeTokenizer.TT_OPERATOR ) )
{
expression.clearParseExceptions();
parseExpression(); //bad rhs
popExpression();
verify( noop, false, Res.MSG_JAVA_STYLE_VARIABLE_DECLARATION, str );
}
else
{
_tokenizer.restoreToMark( state );
removeLocationsFrom( iLocationsCount );
verify( noop, false, Res.MSG_UNEXPECTED_TOKEN, str );
}
}
else
{
verify( noop, false, Res.MSG_UNEXPECTED_TOKEN, str );
}
pushStatement( noop );
}
else if( getStatementDepth() == 1 &&
!isParsingBlock() &&
getGosuClass() instanceof IGosuProgram &&
((IGosuProgramInternal)getGosuClass()).isParsingExecutableProgramStatements() )
{
Statement noop = new NoOpStatement();
verify( noop, false, Res.MSG_UNEXPECTED_TOKEN, _tokenizer.getTokenAsString() );
pushStatement( noop );
_tokenizer.nextToken();
}
else
{
return false;
}
}
else
{
return false;
}
}
return true;
}
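// Parses an assert statement of the form: assert <boolean-condition> [ : <detail-expression> ],
// verifying that assertions are allowed here and marking the enclosing class as having assertions.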
private void parseAssertStatement()
{
AssertStatement assertStmt = new AssertStatement();
if( verify( assertStmt, getGosuClass() instanceof IGosuClassInternal, Res.MSG_ASSERTIONS_NOT_ALLOWED_HERE ) )
{
((IGosuClassInternal)getGosuClass()).setHasAssertions( true );
}
parseExpression( ContextType.pBOOLEAN_FALSE );
Expression condition = popExpression();
if( !verify( condition, !(condition instanceof NotAWordExpression), Res.MSG_EXPECTING_CONDITION_FOR_ASSERT ) )
{
//noinspection ThrowableResultOfMethodCallIgnored
condition.removeParseException( Res.MSG_SYNTAX_ERROR );
}
assertStmt.setCondition( condition );
if( match( null, ":", SourceCodeTokenizer.TT_OPERATOR ) )
{
parseExpression( ContextType.OBJECT_FALSE );
Expression detail = popExpression();
if( !verify( detail, !(detail instanceof NotAWordExpression), Res.MSG_EXPECTING_MESSAGE_FOR_ASSERT ) )
{
//noinspection ThrowableResultOfMethodCallIgnored
detail.removeParseException( Res.MSG_SYNTAX_ERROR );
}
assertStmt.setDetail( detail );
}
pushStatement( assertStmt );
}
private boolean areUsingStatementsAllowedInStatementLists()
{
if( _bAreUsingStatementsAllowedInStatementLists == null )
{
_bAreUsingStatementsAllowedInStatementLists =
getGosuClass() == null ||
(getGosuClass() instanceof IGosuProgramInternal && ((IGosuProgramInternal)getGosuClass()).allowsUses()) ||
CommonServices.getEntityAccess().areUsesStatementsAllowedInStatementLists( getGosuClass() );
}
return _bAreUsingStatementsAllowedInStatementLists;
}
private int getStatementDepth()
{
return _iStmtDepth;
}
private void incStatementDepth()
{
_iStmtDepth++;
}
private void decStatementDepth()
{
_iStmtDepth--;
}
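// Parses a local 'var' declaration: matches the 'var' keyword and the identifier, then delegates
// type and initializer parsing to parseVarStatement().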
void parseLocalVarStatement( VarStatement varStmt )
{
Token t = new Token();
verify( varStmt, match( t, Keyword.KW_var ), Res.MSG_EXPECTING_VAR_STMT );
int iNameOffset = getTokenizer().getTokenStart();
if( verify( varStmt, match( t, SourceCodeTokenizer.TT_WORD ), Res.MSG_EXPECTING_IDENTIFIER_VAR ) )
{
varStmt.setNameOffset( iNameOffset, t._strValue );
warn( varStmt, !Keyword.isKeyword( t._strValue ), Res.MSG_IMPROPER_USE_OF_KEYWORD, t._strValue );
}
else
{
t._strValue = null;
}
parseVarStatement( varStmt, t, false );
}
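// Parses the remainder of a var declaration after the identifier: an optional ': Type' (or a block
// literal signature), an optional 'as' property alias when the field belongs to a program, and an
// optional '= <initializer>'. Creates or reuses the symbol, infers the type from the initializer
// when no type literal is given, and pushes the resulting VarStatement.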
void parseVarStatement( VarStatement varStmt, Token idToken, boolean bClassMember )
{
String strIdentifier = idToken._strValue == null ? "" : idToken._strValue;
warn( varStmt, !Keyword.isKeyword( strIdentifier ), Res.MSG_IMPROPER_USE_OF_KEYWORD, strIdentifier );
Token priorToken = getTokenizer().getPriorToken();
boolean bZeroLength = strIdentifier.length() <= 0;
addNameInDeclaration( strIdentifier,
bZeroLength ? priorToken.getTokenEnd() : idToken._iDocPosition,
priorToken.getLine(), priorToken.getTokenColumn(), !bZeroLength );
ISymbol existingSymbol = _symTable.getSymbol( strIdentifier );
if( (isParsingBlock() || getParsingAnonymousClass() != null) && !isParsingAnnotation() )
{
existingSymbol = captureSymbol( getCurrentEnclosingGosuClass(), strIdentifier, null );
}
boolean bFieldSymbolFromProgram = false;
if( !bClassMember && existingSymbol != null )
{
bFieldSymbolFromProgram = getGosuClass() instanceof IGosuProgram &&
(bClassMember || isLocalVarTopLevelFunctionBodyStmt()) &&
((IGosuProgramInternal)getGosuClass()).isParsingExecutableProgramStatements() &&
(existingSymbol instanceof DynamicSymbol );
if( verify( varStmt, bFieldSymbolFromProgram, Res.MSG_VARIABLE_ALREADY_DEFINED, strIdentifier ) )
{
// Overwrite program var's symbol with class field symbol
varStmt.setSymbol( existingSymbol );
}
}
if( varStmt.getModifierInfo() == null )
{
varStmt.setModifierInfo( new ModifierInfo(0) );
}
TypeLiteral typeLiteral = null;
if( match( null, ":", SourceCodeTokenizer.TT_OPERATOR ) )
{
parseTypeLiteral();
typeLiteral = (TypeLiteral)popExpression();
}
else if( !match( null, "=", SourceCodeTokenizer.TT_OPERATOR, true ) )
{
if( match( null, null, '(', true ) )
{
parseBlockLiteral();
typeLiteral = (TypeLiteral)popExpression();
}
}
if( !bClassMember && getGosuClass() instanceof IGosuProgram &&
getGosuClass().getMemberField( strIdentifier ) != null )
{
// Eat the property alias if this is a program field (the class parser already made the property)
if( match( null, Keyword.KW_as ) )
{
match( null, Keyword.KW_readonly );
match( null, SourceCodeTokenizer.TT_WORD );
}
}
varStmt.setTypeLiteral( typeLiteral );
ISymbol symbol = null;
Expression eas = null;
if( match( null, "=", SourceCodeTokenizer.TT_OPERATOR ) )
{
pushParsingFieldInitializer( varStmt );
putThisAndSuperSymbols( varStmt.getModifierInfo() ); // assumes the caller has pushed the scope
if( existingSymbol == null &&
!bClassMember && !bFieldSymbolFromProgram &&
typeLiteral != null && typeLiteral.getType().getType() instanceof IBlockType )
{
// need symbol assigned now so a local var that is a block can be recursive: var foo(i:int):int = i > 0 ? foo(i-1) : 0
Symbol newSym = new Symbol( strIdentifier, typeLiteral.getType().getType(), _symTable, null );
newSym.setModifierInfo( varStmt.getModifierInfo() );
symbol = newSym;
varStmt.setSymbol( newSym );
_symTable.putSymbol( symbol );
}
try
{
parseExpression( typeLiteral == null ? ContextType.EMPTY : new ContextType( typeLiteral.getType().getType() ) );
}
finally
{
popParsingFieldInitializer();
}
eas = popExpression();
if( eas.hasParseExceptions() )
{
if( typeLiteral != null )
{
IType typeCast = typeLiteral.getType().getType();
eas.getParseExceptions().get( 0 ).setExpectedType( typeCast );
}
}
detectLikelyJavaCast(eas);
}
verify( varStmt, eas != null || typeLiteral != null, Res.MSG_VARIABLE_TYPE_OR_VALUE_REQUIRED );
if( typeLiteral != null )
{
verify( typeLiteral, typeLiteral.getType().getType() != JavaTypes.pVOID(), Res.MSG_VOID_NOT_ALLOWED );
}
// Create a local symbol for the identifier part of the 'var' statement
IType type = null;
if( eas != null )
{
type = eas.getType();
if( type == null )
{
type = ErrorType.getInstance();
}
}
if( typeLiteral != null )
{
type = typeLiteral.getType().getType();
}
else if( bClassMember && eas != null )
{
IPropertyInfo varProperty = getGosuClass().getTypeInfo().getProperty(getGosuClass(), strIdentifier);
if( varProperty instanceof IGosuVarPropertyInfo )
{
// Assign inferred type to var property corresponding with this field
IGosuVarPropertyInfo vpi = (IGosuVarPropertyInfo) varProperty;
vpi.assignActualType( eas.getType() );
vpi.assignSymbolType( eas.getType() );
}
}
verify( varStmt, !JavaTypes.pVOID().equals(type) && !JavaTypes.VOID().equals( type ), Res.MSG_VARIABLE_MUST_HAVE_NON_NULL_TYPE );
//if no type was found, we have added an error so give the symbol the error type
if( type == null )
{
type = ErrorType.getInstance();
}
if( symbol == null )
{
if( bClassMember || bFieldSymbolFromProgram )
{
symbol = varStmt.getSymbol();
symbol.setType( type );
varStmt.setType( type );
}
else
{
Symbol newSym = new Symbol( strIdentifier, type, _symTable, null );
newSym.setModifierInfo( varStmt.getModifierInfo() );
symbol = newSym;
varStmt.setSymbol( newSym );
}
if( existingSymbol == null )
{
_symTable.putSymbol( symbol );
}
}
eas = possiblyWrapWithImplicitCoercion( eas, type );
varStmt.setAsExpression( eas );
varStmt.setScriptPart( getScriptPart() );
varStmt.setDefinitionParsed( true );
boolean bHideFieldForEditorParsing = getGosuClass() != null && getGosuClass().isCreateEditorParser() && bFieldSymbolFromProgram;
if( bHideFieldForEditorParsing )
{
pushStatement( new HideFieldNoOpStatement( varStmt ) );
}
else
{
pushStatement( varStmt );
}
}
private boolean isLocalVarTopLevelFunctionBodyStmt() {
if( _symTable.getScopeCount() > 1 ) {
return _symTable.peekScope( 1 ).getActivationCtx() != null;
}
return false;
}
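// Adds a "likely Java cast" parse warning when the initializer is a parenthesized type literal,
// i.e. what a Java-style cast such as '(SomeType) value' looks like to the Gosu parser.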
private void detectLikelyJavaCast(Expression eas) {
if( eas instanceof IParenthesizedExpression || (eas instanceof IImplicitTypeAsExpression && ((IImplicitTypeAsExpression)eas).getLHS() instanceof IParenthesizedExpression) ) {
IParenthesizedExpression parenExpr;
if(eas instanceof IParenthesizedExpression) {
parenExpr = (IParenthesizedExpression)eas;
} else {
parenExpr = (IParenthesizedExpression)((IImplicitTypeAsExpression)eas).getLHS();
}
if (parenExpr.getExpression() instanceof TypeLiteral) {
IType castType = eas.getType();
if(castType instanceof IMetaType) {
castType = ((IMetaType)castType).getType();
}
eas.addParseWarning( new ParseWarning( makeFullParserState(), Res.MSG_LIKELY_JAVA_CAST, castType.getName() ) );
}
}
}
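// Attempts to recover from a Java-style cast: if the parsed expression is a parenthesized type
// literal and more tokens follow on the same line, re-parses the trailing expression, warns with
// MSG_LIKELY_JAVA_CAST, and wraps the real expression in an implicit coercion to the cast type.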
private boolean recoverFromJavaStyleCast( Expression eas )
{
if( eas instanceof IParenthesizedExpression &&
((IParenthesizedExpression)eas).getExpression() instanceof TypeLiteral )
{
if( getTokenizer().getLineNumber() == eas.getLocation().getLineNum() )
{
// Something follows a java-style-cast-looking expression on same line, so assume it's a java-style cast...
IType castType = eas.getType();
if( castType instanceof IMetaType ){
castType = ((IMetaType) castType).getType();
}
int mark = getTokenizer().mark();
parseUnaryExpression();
Expression maybeRealEas = popExpression();
if( maybeRealEas.hasParseExceptions() )
{
_locations.remove( maybeRealEas.getLocation() );
getTokenizer().restoreToMark( mark );
}
else
{
popExpression();
eas.addParseWarning( new ParseWarning( makeFullParserState(), Res.MSG_LIKELY_JAVA_CAST, castType.getName() ) );
eas = possiblyWrapWithImplicitCoercion( maybeRealEas, castType );
pushExpression( eas );
return true;
}
}
}
return false;
}
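// Parses the optional property clause of a field declaration, roughly:
//   var _field : Type as [readonly] PropertyName [= initializer]
// Builds the backing getter (and a setter, unless readonly or the var is final) as a
// DynamicPropertySymbol bound to the field identifier.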
DynamicPropertySymbol parseVarPropertyClause( VarStatement varStmt, ModifierInfo modifiers, String strVarIdentifier, IType varType, boolean parseInitializer )
{
if( !match( null, Keyword.KW_as ) )
{
return null;
}
boolean bReadonly = match( null, Keyword.KW_readonly ) || varStmt.isFinal();
final int iNameStart = getTokenizer().getTokenStart();
Token T = new Token();
varStmt.setHasProperty( true );
String strPropertyName = null;
if( verify( varStmt, match( T, SourceCodeTokenizer.TT_WORD ), Res.MSG_EXPECTING_NAME_PROPERTY ) )
{
strPropertyName = T._strValue;
warn( varStmt, !Keyword.isKeyword( strPropertyName ), Res.MSG_IMPROPER_USE_OF_KEYWORD, strPropertyName );
}
String strIdentifier = strPropertyName == null ? "" : strPropertyName;
Token restoreState = getTokenizer().getPriorToken();
addNameInDeclaration( strIdentifier,
T._iDocPosition,
restoreState.getLine(), restoreState.getTokenColumn(), strIdentifier.length() > 0 );
varStmt.setPropertyName( strPropertyName );
varStmt.setNameOffset(iNameStart, strPropertyName );
ISymbol symbol = getSymbolTable().getSymbol( strPropertyName );
if( symbol != null && !symbol.getDisplayName().equals( strPropertyName ) )
{
// Force case sensitivity, mainly to make overrides consistent
symbol = null;
}
if( symbol instanceof DynamicPropertySymbol &&
symbol.getGosuClass() == getGosuClass() &&
(((DynamicPropertySymbol)symbol).getVarIdentifier() != null &&
!((DynamicPropertySymbol)symbol).getVarIdentifier().equals( strVarIdentifier) ) )
{
varStmt.addParseException( new ParseException( makeFullParserState(), Res.MSG_PROPERTY_ALREADY_DEFINED, strPropertyName) );
}
ICompilableType gsClass = getGosuClass();
ModifierInfo propModifiers = new ModifierInfo( Modifier.PUBLIC | (varStmt.isStatic() ? Modifier.STATIC : 0) );
propModifiers.setAnnotations( modifiers.getAnnotations() );
propModifiers.setDescription( modifiers.getDescription() );
DynamicPropertySymbol dps = makeGetter( varStmt, strVarIdentifier, strPropertyName, varType, propModifiers, symbol, gsClass, true );
if( !bReadonly )
{
makeSetter( varStmt, strVarIdentifier, strPropertyName, varType, propModifiers, symbol, gsClass, dps, true );
}
dps.setScriptPart( getOwner().getScriptPart() );
dps.setVarIdentifier( strVarIdentifier );
dps.getModifierInfo().setDescription( propModifiers.getDescription() );
varStmt.setProperty( dps );
if( parseInitializer && match( null, "=", SourceCodeTokenizer.TT_OPERATOR, false ) )
{
pushParsingFieldInitializer( varStmt );
try
{
parseExpression( new ContextType( varType ) );
}
finally
{
popParsingFieldInitializer();
}
Expression expression = popExpression();
verifyComparable( varType, expression );
expression = possiblyWrapWithImplicitCoercion( expression, varType );
varStmt.setAsExpression( expression );
}
return dps;
}
DynamicPropertySymbol makeProperties( VarStatement varStmt, String strVarIdentifier, String strPropertyName, IType varType, ModifierInfo modifiers, boolean bGetter, boolean bSetter )
{
varStmt.setHasProperty( true );
varStmt.setPropertyName( strPropertyName );
ISymbol symbol = getSymbolTable().getSymbol( strPropertyName );
if( symbol != null && !symbol.getDisplayName().equals( strPropertyName ) )
{
// Force case sensitivity, mainly to make overrides consistent
symbol = null;
}
if( symbol instanceof DynamicPropertySymbol &&
symbol.getGosuClass() == getGosuClass() &&
(((DynamicPropertySymbol)symbol).getVarIdentifier() != null &&
!((DynamicPropertySymbol)symbol).getVarIdentifier().equals( strVarIdentifier) ) )
{
varStmt.addParseException( new ParseException( makeFullParserState(), Res.MSG_PROPERTY_ALREADY_DEFINED, strPropertyName ) );
}
ICompilableType gsClass = getGosuClass();
DynamicPropertySymbol dps = null;
if( bGetter )
{
dps = makeGetter( varStmt, strVarIdentifier, strPropertyName, varType, modifiers, symbol, gsClass, false );
}
if( bSetter )
{
dps = makeSetter( varStmt, strVarIdentifier, strPropertyName, varType, modifiers, symbol, gsClass, dps, false );
}
dps.setScriptPart( getOwner().getScriptPart() );
dps.setVarIdentifier( strVarIdentifier );
return dps;
}
private DynamicPropertySymbol makeGetter( VarStatement varStmt, String strVarIdentifier, String strPropertyName, IType varType, ModifierInfo modifiers, ISymbol symbol, ICompilableType gsClass, boolean bOldSyntax )
{
DynamicPropertySymbol dps;
if( symbol instanceof DynamicPropertySymbol && symbol.getGosuClass() != null && symbol.getGosuClass().isAssignableFrom( gsClass ) )
{
dps = new DynamicPropertySymbol( (DynamicPropertySymbol)symbol );
dps.getModifierInfo().setDescription( modifiers.getDescription() );
if( dps.getGetterDfs() == null || dps.getGetterDfs().getScriptPart().getContainingType() != gsClass )
{
VarPropertyGetFunctionSymbol getFunctionSymbol = new VarPropertyGetFunctionSymbol( gsClass, getSymbolTable(), strPropertyName, strVarIdentifier, varType );
transferModifierInfo( varStmt, modifiers, AnnotationUseSiteTarget.get, getFunctionSymbol );
getFunctionSymbol.getModifierInfo().removeModifiers( Modifier.OVERRIDE );
getFunctionSymbol.setClassMember( true );
if( dps.getGetterDfs() != null )
{
warn( varStmt, bOldSyntax || dps.getGetterDfs().isFinal() || Modifier.isOverride( modifiers.getModifiers() ), Res.MSG_MISSING_OVERRIDE_MODIFIER, dps.getDisplayName(), dps.getGetterDfs().getGosuClass().getName() );
getFunctionSymbol.setOverride( true );
getFunctionSymbol.setSuperDfs( dps.getGetterDfs() );
}
dps.setGetterDfs( getFunctionSymbol );
verifyFunction( getFunctionSymbol, varStmt );
}
}
else
{
VarPropertyGetFunctionSymbol getFunctionSymbol = new VarPropertyGetFunctionSymbol( gsClass, getSymbolTable(), strPropertyName, strVarIdentifier, varType );
transferModifierInfo( varStmt, modifiers, AnnotationUseSiteTarget.get, getFunctionSymbol );
getFunctionSymbol.setClassMember( true );
verifyFunction( getFunctionSymbol, varStmt );
dps = new DynamicPropertySymbol( getFunctionSymbol, true );
dps.getModifierInfo().setDescription( modifiers.getDescription() );
}
return dps;
}
private DynamicPropertySymbol makeSetter( VarStatement varStmt, String strVarIdentifier, String strPropertyName, IType varType, ModifierInfo modifiers, ISymbol symbol, ICompilableType gsClass, DynamicPropertySymbol dps, boolean bOldSyntax )
{
if( dps != null )
{
symbol = dps;
}
if( symbol instanceof DynamicPropertySymbol && symbol.getGosuClass() != null && symbol.getGosuClass().isAssignableFrom( gsClass ) )
{
if( dps == null )
{
dps = new DynamicPropertySymbol( (DynamicPropertySymbol)symbol );
}
if( dps.getSetterDfs() == null || dps.getSetterDfs().getScriptPart().getContainingType() != gsClass )
{
VarPropertySetFunctionSymbol setFunctionSymbol = new VarPropertySetFunctionSymbol( gsClass, getSymbolTable(), strPropertyName, strVarIdentifier, varType );
transferModifierInfo( varStmt, modifiers, AnnotationUseSiteTarget.set, setFunctionSymbol );
setFunctionSymbol.getModifierInfo().removeModifiers( Modifier.OVERRIDE );
setFunctionSymbol.setClassMember( true );
if( dps.getSetterDfs() != null )
{
warn( varStmt, bOldSyntax || dps.getSetterDfs().isFinal() || Modifier.isOverride( modifiers.getModifiers() ) || varStmt.hasParseException( Res.MSG_MISSING_OVERRIDE_MODIFIER ), Res.MSG_MISSING_OVERRIDE_MODIFIER, dps.getDisplayName(), dps.getSetterDfs().getGosuClass().getName() );
setFunctionSymbol.setOverride( true );
setFunctionSymbol.setSuperDfs( dps.getSetterDfs() );
}
dps.setSetterDfs( setFunctionSymbol );
verifyFunction( setFunctionSymbol, varStmt );
}
}
else
{
VarPropertySetFunctionSymbol setFunctionSymbol = new VarPropertySetFunctionSymbol( gsClass, getSymbolTable(), strPropertyName, strVarIdentifier, varType );
transferModifierInfo( varStmt, modifiers, AnnotationUseSiteTarget.set, setFunctionSymbol );
setFunctionSymbol.setClassMember( true );
verifyFunction( setFunctionSymbol, varStmt );
dps = new DynamicPropertySymbol( setFunctionSymbol, false );
}
return dps;
}
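// Copies modifiers and use-site-targeted annotations from a field's modifier info onto a generated
// property accessor: annotations targeting 'get', 'set' or 'accessors' go on the matching accessor
// method, and 'param'-targeted annotations go on the setter's parameter; annotations consumed
// exclusively by one target are removed so they are not applied again elsewhere.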
private void transferModifierInfo( VarStatement varStmt, ModifierInfo modifiers, AnnotationUseSiteTarget target, DynamicFunctionSymbol dfs )
{
dfs.getModifierInfo().setDescription( modifiers.getDescription() );
dfs.getModifierInfo().setModifiers( modifiers.getModifiers() );
for( Iterator<IGosuAnnotation> iter = modifiers.getAnnotations().iterator(); iter.hasNext(); )
{
IGosuAnnotation anno = iter.next();
if( anno.getTarget() == target || anno.getTarget() == AnnotationUseSiteTarget.accessors || anno.getTarget() == null )
{
// 'set' or 'get' or 'accessors' target type, apply annotation exclusively to get and/or set method
if( appliesToElementType( anno, ElementType.METHOD ) )
{
verify( varStmt, !anno.isJavaAnnotation() || !GosuClassParser.violatesRepeatable( dfs.getModifierInfo().getAnnotations(), anno ), Res.MSG_TOO_MANY_ANNOTATIONS, anno.getName(), target.name() );
if( anno.getType() == JavaTypes.TARGET_MODIFIER() )
{
setFromTargetModifier( anno, dfs.getModifierInfo() );
}
else
{
dfs.getModifierInfo().addAnnotation( anno );
if( anno.getTarget() == target ||
(anno.getTarget() == AnnotationUseSiteTarget.accessors && target == AnnotationUseSiteTarget.set) )
{
// annotation exclusively targets get or set or accessors, remove it from further applications
iter.remove();
}
}
}
}
else if( anno.getTarget() == AnnotationUseSiteTarget.param && target == AnnotationUseSiteTarget.set )
{
// annotation targets 'param', apply exclusively to set method's param
if( appliesToElementType( anno, ElementType.PARAMETER ) )
{
if( !dfs.getArgs().isEmpty() )
{
ModifierInfo paramModifiers = (ModifierInfo)dfs.getArgs().get( 0 ).getModifierInfo();
verify( varStmt, !anno.isJavaAnnotation() || !GosuClassParser.violatesRepeatable( paramModifiers.getAnnotations(), anno ), Res.MSG_TOO_MANY_ANNOTATIONS, anno.getName(), Keyword.KW_param.getName() );
paramModifiers.addAnnotation( anno );
}
// annotation exclusively targets param, remove it from further applications
iter.remove();
}
}
}
}
private void transferModifierInfo( ParsedElement stmt, ModifierInfo modifiers, AnnotationUseSiteTarget target, EnhancementDynamicFunctionSymbol dfs )
{
ModifierInfo paramModifiers = dfs.getReceiver().getModifierInfo();
// Note, we remove existing so as not to duplicate annos.
// Also note, the annos added during decl time don't have arg expression, those are parsed only during
// defn time, thus we have to re-add them here so we have the full annotation expression with args.
paramModifiers.setAnnotations( Collections.emptyList() );
for( Iterator<IGosuAnnotation> iter = modifiers.getAnnotations().iterator(); iter.hasNext(); )
{
IGosuAnnotation anno = iter.next();
if( anno.getTarget() == AnnotationUseSiteTarget.receiver && target == AnnotationUseSiteTarget.receiver )
{
// annotation targets 'receiver', apply exclusively to enhancement method
if( appliesToElementType( anno, ElementType.PARAMETER ) )
{
if( stmt != null )
{
verify( stmt, !anno.isJavaAnnotation() || !GosuClassParser.violatesRepeatable( paramModifiers.getAnnotations(), anno ), Res.MSG_TOO_MANY_ANNOTATIONS, anno.getName(), Keyword.KW_receiver.getName() );
}
paramModifiers.addAnnotation( anno );
// annotation exclusively targets param, remove it from further applications
iter.remove();
}
}
}
}
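// Applies a TARGET_MODIFIER annotation value ("private", "internal", "protected", "public" or
// "final") to the given modifier info, replacing any existing access modifier unless the value
// is "final".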
static void setFromTargetModifier( IGosuAnnotation anno, ModifierInfo modifierInfo )
{
String mod = (String)((AnnotationExpression)anno.getExpression()).getArgs()[0].evaluate();
int modifier;
switch( mod )
{
case "private":
modifier = Modifier.PRIVATE;
break;
case "internal":
modifier = Modifier.INTERNAL;
break;
case "protected":
modifier = Modifier.PROTECTED;
break;
case "public":
modifier = Modifier.PUBLIC;
break;
case "final":
modifier = Modifier.FINAL;
break;
default:
throw new IllegalStateException();
}
if( modifier != Modifier.FINAL )
{
modifierInfo.removeModifiers( Modifier.PRIVATE | Modifier.INTERNAL | Modifier.PROTECTED | Modifier.PUBLIC );
}
modifierInfo.addModifiers( modifier );
}
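// Returns true if the annotation's declared @Target (if any) permits use on the given element type;
// an annotation without a @Target meta-annotation is treated as applicable everywhere.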
private boolean appliesToElementType( IGosuAnnotation anno, ElementType elemType )
{
IAnnotationInfo annotation = anno.getType().getTypeInfo().getAnnotation( JavaTypes.TARGET() );
if( annotation == null )
{
return true;
}
Object targetValue = annotation.getFieldValue( "value" );
if( targetValue == elemType ||
targetValue instanceof String && targetValue.equals( elemType.name() ) )
{
return true;
}
else if( targetValue instanceof Object[] )
{
Object[] values = (Object[])targetValue;
if( values == null || values.length == 0 )
{
return true;
}
for( Object value : values )
{
if( value == elemType || value.equals( elemType.name() ) )
{
return true;
}
}
}
return false;
}
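// Parses the remainder of a delegate field declaration (after the identifier), roughly:
//   delegate _impl [: Type] represents Interface1[, Interface2 ...] [= initializer]
// verifying each constituent is a delegatable interface the owning class implements, and inferring
// the delegate's type from the type literal, the constituents, or the initializer.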
void parseDelegateStatement( DelegateStatement delegateStmt, String strIdentifier )
{
if( delegateStmt.getModifierInfo() == null )
{
delegateStmt.setModifierInfo( new ModifierInfo(0) );
}
TypeLiteral typeLiteral = null;
if( match( null, ":", SourceCodeTokenizer.TT_OPERATOR ) )
{
parseTypeLiteral();
typeLiteral = (TypeLiteral)popExpression();
warn( delegateStmt, typeLiteral.getType().getType() != null &&
!typeLiteral.getType().getType().equals( getCurrentEnclosingGosuClass() ),
Res.MSG_DELEGATES_SHOULD_NOT_SELF_DELEGATE );
}
ICompilableType gsClass = getGosuClass();
List<IType> constituents = new ArrayList<>();
if( verify( delegateStmt, match( null, Keyword.KW_represents ), Res.MSG_EXPECTING_REPRESENTS ) )
{
do
{
getOwner().parseTypeLiteral();
TypeLiteral ifaceLiteral = (TypeLiteral)popExpression();
IType iface = ifaceLiteral.getType().getType();
if( !(iface instanceof ErrorType) )
{
verify( ifaceLiteral, iface.isInterface() && !iface.isCompoundType(), Res.MSG_DELEGATES_REPRESENT_INTERFACES_ONLY );
verify( ifaceLiteral, TypeLord.isDelegatableInterface( gsClass, iface ), Res.MSG_CLASS_DOES_NOT_IMPL, iface );
verify( typeLiteral, typeLiteral == null || TypeLord.isDelegatableInterface( typeLiteral.getType().getType(), iface ), Res.MSG_CLASS_DOES_NOT_IMPL, iface );
}
constituents.add( iface );
} while( match( null, ',' ) );
}
delegateStmt.setConstituents( constituents );
Expression eas = null;
if( match( null, "=", SourceCodeTokenizer.TT_OPERATOR ) )
{
pushParsingFieldInitializer( delegateStmt );
try
{
parseExpression( typeLiteral == null ? ContextType.EMPTY : new ContextType( typeLiteral.getType().getType() ) );
}
finally
{
popParsingFieldInitializer();
}
eas = popExpression();
if( eas.hasParseExceptions() )
{
if( typeLiteral != null )
{
IType typeCast = typeLiteral.getType().getType();
eas.getParseExceptions().get( 0 ).setExpectedType( typeCast );
}
}
if( eas instanceof IParenthesizedExpression &&
((IParenthesizedExpression)eas).getExpression() instanceof TypeLiteral )
{
IType castType = eas.getType();
if(castType instanceof IMetaType) {
castType = ((IMetaType)castType).getType();
}
eas.addParseWarning( new ParseWarning( makeFullParserState(), Res.MSG_LIKELY_JAVA_CAST, castType.getName() ) );
}
}
// Create a local symbol for the identifier part of the 'var' statement
IType type = null;
if( eas != null )
{
type = eas.getType();
if( type == null )
{
type = ErrorType.getInstance();
}
}
if( typeLiteral != null )
{
IType typeCast = typeLiteral.getType().getType();
if( eas != null && type != null )
{
verifyComparable( typeCast, eas, false, true );
}
type = typeCast;
}
else if( !(type instanceof ErrorType) )
{
if( constituents.isEmpty() )
{
type = ErrorType.getInstance();
}
else if( constituents.size() == 1 )
{
type = constituents.get( 0 );
}
else
{
type = CompoundType.get( new HashSet<>( constituents ) );
}
}
//if no type was found, we have added an error so give the symbol the error type
if( type == null )
{
type = ErrorType.getInstance();
}
ISymbol symbol = delegateStmt.getSymbol();
if( symbol == null )
{
symbol = new Symbol( strIdentifier, type, _symTable );
delegateStmt.setSymbol( symbol );
}
else
{
symbol.setType( type );
}
_symTable.putSymbol( symbol );
delegateStmt.setType( type );
eas = possiblyWrapWithImplicitCoercion( eas, type );
delegateStmt.setAsExpression( eas );
delegateStmt.setTypeLiteral( typeLiteral );
delegateStmt.setScriptPart( getScriptPart() );
pushStatement( delegateStmt );
}
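// Parses: switch( <expression> ) { <case clauses> [default clause] }, allowing 'break' inside the
// body by incrementing the break counter for the duration of the clause parsing.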
private void parseSwitchStatement()
{
SwitchStatement switchStmt = new SwitchStatement();
verify( switchStmt, match( null, '(' ), Res.MSG_EXPECTING_LEFTPAREN_SWITCH );
parseExpression();
verify( switchStmt, match( null, ')' ), Res.MSG_EXPECTING_RIGHTPAREN_SWITCH );
verify( switchStmt, match( null, '{' ), Res.MSG_EXPECTING_OPEN_BRACE_FOR_SWITCH );
Expression e = popExpression();
switchStmt.setSwitchExpression( e );
_iBreakOk++;
try
{
parseCaseClauses( switchStmt );
parseDefaultClause( switchStmt, Arrays.asList( switchStmt.getCases() ) );
}
finally
{
_iBreakOk--;
}
verify( switchStmt, match( null, '}' ), Res.MSG_EXPECTING_CLOSE_BRACE_FOR_SWITCH );
pushStatement( switchStmt );
}
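// Parses: do <statement> while( <condition> ), flagging '=' used where '==' was likely intended
// and reporting a loop condition that is a compile-time constant false.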
private void parseDoWhileStatement()
{
DoWhileStatement whileStmt = new DoWhileStatement();
_ctxInferenceMgr.pushLoopCompromised();
boolean loopStmtParsed;
try
{
loopStmtParsed = parseLoopStatement();
}
finally
{
_ctxInferenceMgr.popLoopCompromised();
}
if( verify( whileStmt, loopStmtParsed, Res.MSG_EXPECTING_STATEMENT ) )
{
verify( whileStmt, match( null, Keyword.KW_while ), Res.MSG_EXPECTING_WHILE_DO );
Statement stmt = popStatement();
verify( whileStmt, match( null, '(' ), Res.MSG_EXPECTING_LEFTPAREN_IF );
parseExpression( ContextType.pBOOLEAN_FALSE );
// Bad assignment statement in if clause (mistaken for equality "==")
if( match( null, "=", SourceCodeTokenizer.TT_OPERATOR ) )
{
parseExpression();
popExpression();
verify( whileStmt, false, Res.MSG_ASSIGNMENT_IN_LOOP_STATEMENT);
}
verify( whileStmt, match( null, ')' ), Res.MSG_EXPECTING_RIGHTPAREN_IF );
Expression e = popExpression();
whileStmt.setExpression( e );
verifyLoopConditionNotAlwaysFalse( e );
whileStmt.setStatement( stmt );
}
pushStatement( whileStmt );
}
private void verifyLoopConditionNotAlwaysFalse( Expression e )
{
verify( e, !e.isCompileTimeConstant() || e.hasParseExceptions() || (boolean)e.evaluate(),
Res.MSG_CONDITION_IS_ALWAYS_TRUE_FALSE, false );
}
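// Parses: while( <condition> ) <statement>, with the same assignment-in-condition and
// constant-false condition checks as do/while.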
private void parseWhileStatement()
{
WhileStatement whileStmt = new WhileStatement();
_ctxInferenceMgr.pushLoopCompromised();
verify( whileStmt, match( null, '(' ), Res.MSG_EXPECTING_LEFTPAREN_WHILE );
parseExpression( ContextType.pBOOLEAN_FALSE );
// Bad assignment statement in if clause (mistaken for equality "==")
if( match( null, "=", SourceCodeTokenizer.TT_OPERATOR ) )
{
parseExpression();
popExpression();
verify( whileStmt, false, Res.MSG_ASSIGNMENT_IN_LOOP_STATEMENT);
}
verify( whileStmt, match( null, ')' ), Res.MSG_EXPECTING_RIGHTPAREN_WHILE );
Expression e = popExpression();
_ctxInferenceMgr.pushLastCtx();
try
{
whileStmt.setExpression( e );
verifyLoopConditionNotAlwaysFalse( e );
if( verify( whileStmt, parseLoopStatement(), Res.MSG_EXPECTING_STATEMENT ) )
{
Statement stmt = popStatement();
whileStmt.setStatement( stmt );
}
pushStatement( whileStmt );
}
finally
{
_ctxInferenceMgr.popCtx( false );
_ctxInferenceMgr.popLoopCompromised();
}
}
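// Parses a for/foreach statement, roughly:
//   for( [var] <ident> in <iterable-expression> [index <i>] [iterator <iter>] ) <statement>
// Also supports a lone interval with no loop variable, e.g. for( 1..3 ) <statement>. The loop
// variable and the optional index/iterator variables are registered as scoped symbols.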
private void parseForEachStatement()
{
Token t = new Token();
ForEachStatement forEachStmt = new ForEachStatement( _symTable );
verify( forEachStmt, match( null, '(' ), Res.MSG_EXPECTING_LEFTPAREN_FE );
boolean bLoneInterval = false;
Expression ein = null;
if( !match( null, Keyword.KW_var ) )
{
// Handle case where no loop var is provided e.g., for( 1..3 ) ... similar to ruby's 3.times( ... )
int state = _tokenizer.mark();
int iLocationsCount = _locations.size();
parseExpression();
ein = popExpression();
bLoneInterval = JavaTypes.NUMBER_INTERVAL().isAssignableFrom( ein.getType() );
if( !bLoneInterval )
{
_tokenizer.restoreToMark( state );
removeLocationsFrom( iLocationsCount );
}
}
int iOffset = getTokenizer().getTokenStart();
int iLineNum = getTokenizer().getLineNumber();
int iColumn = getTokenizer().getTokenColumn();
if( !bLoneInterval && verify( forEachStmt, match( t, SourceCodeTokenizer.TT_WORD ), Res.MSG_EXPECTING_IDENTIFIER_FOREACH ) )
{
forEachStmt.setNameOffset( iOffset, t._strValue );
warn( forEachStmt, !Keyword.isKeyword( t._strValue ), Res.MSG_IMPROPER_USE_OF_KEYWORD, t._strValue );
}
else
{
t._strValue = null;
}
_symTable.pushScope();
try
{
String strIdentifier = t._strValue == null ? "" : t._strValue;
// var decl *expression* is only for editors
LocalVarDeclaration varDecl = null;
if( strIdentifier.length() > 0 )
{
varDecl = new LocalVarDeclaration( strIdentifier );
verify( forEachStmt, _symTable.getSymbol( strIdentifier ) == null, Res.MSG_VARIABLE_ALREADY_DEFINED, strIdentifier );
varDecl.setType( ErrorType.getInstance() ); // this is updated below
pushExpression( varDecl );
setLocation( iOffset, iLineNum, iColumn, strIdentifier == null, true );
popExpression();
}
if( !bLoneInterval )
{
verify( forEachStmt, match( null, Keyword.KW_in ), Res.MSG_EXPECTING_IN_FOREACH );
parseExpression();
ein = popExpression();
}
IType typeIn = ein.getType();
verify( ein, LoopStatement.isIteratorType( typeIn ) || typeIn instanceof ErrorType,
Res.MSG_EXPECTING_ARRAYTYPE_FOREACH, typeIn.getName() );
forEachStmt.setInExpression( ein );
forEachStmt.setStructuralIterable( StandardCoercionManager.isStructurallyAssignable_Laxed( JavaTypes.ITERABLE(), typeIn ) );
if( strIdentifier != null )
{
// Create a temporary symbol for the identifier part of the foreach statement
IType typeIdentifier = LoopStatement.getArrayComponentType( typeIn );
if( strIdentifier.length() > 0 )
{
varDecl.setType( typeIdentifier );
}
Symbol symbol = new Symbol( bLoneInterval ? ("_unused_loop_var_" + iOffset) : strIdentifier, typeIdentifier, _symTable, null );
_symTable.putSymbol( symbol );
forEachStmt.setIdentifier( symbol );
}
boolean foundIterator = false;
if( match( null, Keyword.KW_iterator ) )
{
foundIterator = true;
parseIteratorVar( forEachStmt, typeIn );
}
if( match( null, Keyword.KW_index ) )
{
parseIndexVar( forEachStmt );
}
if( !foundIterator && match( null, Keyword.KW_iterator ) )
{
foundIterator = true;
parseIteratorVar( forEachStmt, typeIn );
}
if( bLoneInterval && foundIterator )
{
addError( forEachStmt, Res.MSG_FOREACH_ITERATOR_NOT_ALLOWED );
}
verify( forEachStmt, match( null, ')' ), Res.MSG_EXPECTING_RIGHTPAREN_FE );
if( strIdentifier != null )
{
_ctxInferenceMgr.pushLoopCompromised();
boolean bHasBody;
try
{
bHasBody = parseLoopStatement();
}
finally
{
_ctxInferenceMgr.popLoopCompromised();
}
verify( forEachStmt, bHasBody, Res.MSG_EXPECTING_STATEMENT );
if( bHasBody )
{
Statement stmt = popStatement();
forEachStmt.setStatement( stmt );
}
}
pushStatement( forEachStmt );
}
finally
{
_symTable.popScope();
}
}
private void parseIndexVar( ForEachStatement forEachStmt ) {
int iOffset;
int iLineNum;
int iColumn;
iOffset = getTokenizer().getTokenStart();
iLineNum = getTokenizer().getLineNumber();
iColumn = getTokenizer().getTokenColumn();
Token Tindex = new Token();
if( verify( forEachStmt, match( Tindex, SourceCodeTokenizer.TT_WORD ), Res.MSG_EXPECTING_IDENTIFIER_FOREACH_INDEX ) )
{
warn( forEachStmt, !Keyword.isKeyword( Tindex._strValue ), Res.MSG_IMPROPER_USE_OF_KEYWORD, Tindex._strValue );
}
String strIndexIdentifier = Tindex._strValue;
// index decl *expression* is only for editors
LocalVarDeclaration varIndexDecl = new LocalVarDeclaration( strIndexIdentifier == null ? "#err" : strIndexIdentifier );
varIndexDecl.setType( JavaTypes.pINT() );
verify( forEachStmt, _symTable.getSymbol( strIndexIdentifier ) == null, Res.MSG_VARIABLE_ALREADY_DEFINED, strIndexIdentifier );
forEachStmt.setIndexNameOffset( Tindex.getTokenStart() );
// Create a temporary symbol for the identifier part of the foreach statement's index
Symbol indexIdentifier = new TypedSymbol( strIndexIdentifier, JavaTypes.pINT(), _symTable, null, SymbolType.FOREACH_VARIABLE );
indexIdentifier.setFinal( true );
_symTable.putSymbol( indexIdentifier );
forEachStmt.setIndexIdentifier( indexIdentifier );
pushExpression( varIndexDecl );
setLocation( iOffset, iLineNum, iColumn, strIndexIdentifier == null, false );
popExpression();
}
private void parseIteratorVar( ForEachStatement forEachStmt, IType type )
{
int iOffset;
int iLineNum;
int iColumn;
iOffset = getTokenizer().getTokenStart();
iLineNum = getTokenizer().getLineNumber();
iColumn = getTokenizer().getTokenColumn();
Token Titer = new Token();
if( verify( forEachStmt, match( Titer, SourceCodeTokenizer.TT_WORD ), Res.MSG_EXPECTING_IDENTIFIER_FOREACH_ITERATOR ) )
{
warn( forEachStmt, !Keyword.isKeyword( Titer._strValue ), Res.MSG_IMPROPER_USE_OF_KEYWORD, Titer._strValue );
}
String strIterIdentifier = Titer._strValue;
boolean bIterable = JavaTypes.ITERABLE().isAssignableFrom( type ) || forEachStmt.isStructuralIterable();
verify( forEachStmt, bIterable, Res.MSG_ITERATOR_SYMBOL_ONLY_SUPPORTED_ON_ITERABLE_OBJECTS );
IType iterType;
if( bIterable )
{
IType iterParam = LoopStatement.getArrayComponentType( type );
iterType = JavaTypes.ITERATOR().getParameterizedType( iterParam );
}
else
{
iterType = ErrorType.getInstance();
}
// index decl *expression* is only for editors
LocalVarDeclaration varIteratorDecl = new LocalVarDeclaration( strIterIdentifier == null ? "#err" : strIterIdentifier );
varIteratorDecl.setType( iterType );
verify( forEachStmt, _symTable.getSymbol( strIterIdentifier ) == null, Res.MSG_VARIABLE_ALREADY_DEFINED, strIterIdentifier );
forEachStmt.setIndexNameOffset( Titer.getTokenStart() );
Symbol iteratorIdentifier = new TypedSymbol( strIterIdentifier, iterType, _symTable, null, SymbolType.FOREACH_VARIABLE );
iteratorIdentifier.setFinal( true );
_symTable.putSymbol( iteratorIdentifier );
forEachStmt.setIteratorIdentifier( iteratorIdentifier );
pushExpression( varIteratorDecl );
setLocation( iOffset, iLineNum, iColumn, strIterIdentifier == null, false );
popExpression();
}
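// Parses a return statement, determining the expected return type from the enclosing block,
// function or program. A bare 'return' (followed by ';' or '}') is only allowed when no value is
// expected or inside a constructor; otherwise the return value expression is parsed and checked
// against the expected type.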
private void parseReturnStatement()
{
ReturnStatement returnStmt = new ReturnStatement();
IType returnType = null;
if( isParsingBlock() )
{
returnType = _blockReturnTypeStack.peek();
}
else if( isParsingFunction() )
{
if( _bProgramCallFunction && getGosuClass() instanceof IGosuProgram )
{
returnType = ((IGosuProgram)getGosuClass()).getExpectedReturnType();
}
if( returnType == null )
{
returnType = peekParsingFunction().getReturnType();
}
}
else if ( isParsingProgram() )
{
returnType = peekParsingProgram().getDeclaredReturnType();
}
verify( returnStmt, _iReturnOk > 0, Res.MSG_RETURN_NOT_ALLOWED_HERRE );
if( match( null, ';' ) || match( null, null, '}', true ) )
{
boolean bShouldNotHaveReturnValue = _bProgramCallFunction || returnType == null || returnType == GosuParserTypes.NULL_TYPE();
FunctionType functionType = isParsingFunction() ? peekParsingFunction() : null;
boolean bConstructor = !bShouldNotHaveReturnValue && functionType != null && functionType.getMethodOrConstructorInfo() instanceof IConstructorInfo;
verify( returnStmt, bShouldNotHaveReturnValue || bConstructor, Res.MSG_MISSING_RETURN_VALUE );
setReturnNullExpr( returnStmt, _bProgramCallFunction );
}
else if( returnType != GosuParserTypes.NULL_TYPE() && !inConstructorCtx() )
{
parseExpression( new ContextType( returnType ) );
Expression retValue = popExpression();
if( returnType != null && !isParsingBlock() )
{
Expression actualReturnExpr = retValue;
if( retValue instanceof ImplicitTypeAsExpression )
{
actualReturnExpr = ((ImplicitTypeAsExpression)retValue).getLHS();
}
boolean bVoidReturnType = actualReturnExpr.getType() == GosuParserTypes.NULL_TYPE() &&
!(actualReturnExpr instanceof NullExpression);
if( bVoidReturnType )
{
//noinspection ThrowableResultOfMethodCallIgnored
actualReturnExpr.removeParseWarning( Res.MSG_USING_VOID_RETURN_TYPE_FROM_NON_NULL_EXPR );
verify( actualReturnExpr, !bVoidReturnType, Res.MSG_USING_VOID_RETURN_TYPE_FROM_NON_NULL_EXPR );
}
}
returnStmt.setValue( retValue );
if( (returnType == null || _bProgramCallFunction) && retValue.getType() == JavaTypes.pVOID() )
{
retValue.setType( JavaTypes.OBJECT() );
}
}
else
{
setReturnNullExpr( returnStmt, _bProgramCallFunction );
}
pushStatement( returnStmt );
}
private boolean inConstructorCtx()
{
if( isParsingConstructor() )
{
return _blocks == null || _blocks.isEmpty();
}
return false;
}
private void setReturnNullExpr( ReturnStatement returnStmt, boolean bProgramCallFunction )
{
if( bProgramCallFunction )
{
NullExpression nullExpr = new NullExpression();
nullExpr.setType( JavaTypes.OBJECT() );
returnStmt.setValue( nullExpr );
}
else
{
returnStmt.setValue( NullExpression.instance() );
}
}
private void parseThrowStatement()
{
parseExpression();
Expression e = popExpression();
if( !JavaTypes.THROWABLE().isAssignableFrom( e.getType() ) )
{
verifyComparable( JavaTypes.STRING(), e );
e = possiblyWrapWithImplicitCoercion( e, JavaTypes.STRING() );
}
ThrowStatement throwStmt = new ThrowStatement();
throwStmt.setExpression( e );
pushStatement( throwStmt );
}
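// Parses: try { ... } [catch( [var] <ident> [: ExceptionType] ) { ... }]* [finally { ... }],
// flagging catch clauses that can never execute because an earlier clause already covers the type,
// and requiring at least one catch or a finally block. 'break', 'continue' and 'return' are
// disallowed inside the finally block while it is being parsed.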
private void parseTryCatchFinallyStatement()
{
Token t = new Token();
TryCatchFinallyStatement tryCatchFinallyStmt = new TryCatchFinallyStatement();
if( verify( tryCatchFinallyStmt, match( null, null, '{', true ), Res.MSG_EXPECTING_LEFTBRACE_STMTBLOCK ) )
{
parseStatement();
Statement tryStmt = popStatement();
tryCatchFinallyStmt.setTryStatement( tryStmt );
}
for( int iCatchOffset = _tokenizer.getTokenStart(),
iCatchLineNum = _tokenizer.getLineNumber(),
iCatchColumn = getTokenizer().getTokenColumn();
match( null, Keyword.KW_catch );
iCatchOffset = _tokenizer.getTokenStart(),
iCatchLineNum = _tokenizer.getLineNumber(),
iCatchColumn = getTokenizer().getTokenColumn())
{
CatchClause catchClause = new CatchClause();
_symTable.pushScope();
try
{
verify( tryCatchFinallyStmt, match( null, '(' ), Res.MSG_EXPECTING_LEFTPAREN_CATCH );
match( null, Keyword.KW_var );
int iOffset = _tokenizer.getTokenStart();
int iLineNum = _tokenizer.getLineNumber();
int iColumn = getTokenizer().getTokenColumn();
if( verify( tryCatchFinallyStmt, match( t, SourceCodeTokenizer.TT_WORD ), Res.MSG_EXPECTING_IDENTIFIER_CATCH ) )
{
catchClause.setNameOffset( iOffset, t._strValue );
}
String strIdentifier = t._strValue;
LocalVarDeclaration localVarDecl = new LocalVarDeclaration( strIdentifier == null ? "#err" : strIdentifier );
IType iIntrinsicType;
if( match( null, ":", SourceCodeTokenizer.TT_OPERATOR ) )
{
parseTypeLiteral();
TypeLiteral typeLiteral = (TypeLiteral)popExpression();
iIntrinsicType = typeLiteral.getType().getType();
verify( typeLiteral, (JavaTypes.THROWABLE().isAssignableFrom( iIntrinsicType ) && !(iIntrinsicType instanceof ITypeVariableType)) || iIntrinsicType instanceof IErrorType,
Res.MSG_NOT_A_VALID_EXCEPTION_TYPE, iIntrinsicType.getName() );
}
else
{
if( !CommonServices.getEntityAccess().getLanguageLevel().supportsNakedCatchStatements() )
{
warn( localVarDecl, false, Res.MSG_EXPLICIT_TYPE_RECOMMENDED_FOR_CATCH_STMTS );
}
iIntrinsicType = null;
}
pushExpression( localVarDecl );
localVarDecl.setType( iIntrinsicType == null ? ErrorType.getInstance() : iIntrinsicType );
setLocation( iOffset, iLineNum, iColumn, strIdentifier == null, false );
popExpression();
verify( tryCatchFinallyStmt, match( null, ')' ), Res.MSG_EXPECTING_RIGHTPAREN_CATCH );
verify( tryCatchFinallyStmt, _symTable.getSymbol( strIdentifier ) == null, Res.MSG_VARIABLE_ALREADY_DEFINED, strIdentifier );
Symbol symbol = new TypedSymbol( strIdentifier, iIntrinsicType != null ? iIntrinsicType : JavaTypes.THROWABLE(), _symTable, null, SymbolType.CATCH_VARIABLE);
_symTable.putSymbol( symbol );
if( tryCatchFinallyStmt.getCatchStatements() != null )
{
for( CatchClause c : tryCatchFinallyStmt.getCatchStatements() )
{
IType earlierCatchType = c.getCatchType() != null ? c.getCatchType() : CatchClause.getNakedCatchExceptionType();
IType currentCatchType = iIntrinsicType != null ? iIntrinsicType : CatchClause.getNakedCatchExceptionType();
if( earlierCatchType.isAssignableFrom( currentCatchType ) )
{
verify( catchClause, false, Res.MSG_CATCH_STMT_CANNOT_EXECUTE );
}
}
}
if( verify( tryCatchFinallyStmt, match( null, null, '{', true ), Res.MSG_EXPECTING_LEFTBRACE_STMTBLOCK ) )
{
parseStatement();
Statement catchStmt = popStatement();
catchClause.init( iIntrinsicType, catchStmt, symbol );
pushStatement( catchClause );
setLocation( iCatchOffset, iCatchLineNum, iCatchColumn );
popStatement();
tryCatchFinallyStmt.addCatchClause( catchClause );
}
}
finally
{
_symTable.popScope();
}
}
if( match( null, Keyword.KW_finally ) )
{
int originaliBreakOk = _iBreakOk;
_iBreakOk = 0;
int originaliContinueOk = _iContinueOk;
_iContinueOk = 0;
int originalReturnOk = _iReturnOk;
_iReturnOk = 0;
try
{
if( verify( tryCatchFinallyStmt, match( null, null, '{', true ), Res.MSG_EXPECTING_LEFTBRACE_STMTBLOCK ) )
{
parseStatement();
Statement finallyStmt = popStatement();
tryCatchFinallyStmt.setFinallyStatement( finallyStmt );
}
}
finally
{
_iBreakOk = originaliBreakOk;
_iContinueOk = originaliContinueOk;
_iReturnOk = originalReturnOk;
}
}
if( tryCatchFinallyStmt.getCatchStatements() == null &&
tryCatchFinallyStmt.getFinallyStatement() == null )
{
tryCatchFinallyStmt.addParseException( new ParseException( makeFullParserState(), Res.MSG_CATCH_OR_FINALLY_REQUIRED ) );
}
pushStatement( tryCatchFinallyStmt );
}
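// Parses: if( <condition> ) <statement> [else <statement>], flagging '=' mistakenly used in place
// of '==' in the condition and giving each branch its own symbol table scope.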
private void parseIfStatement()
{
IfStatement ifStmt = new IfStatement();
verify( ifStmt, match( null, '(' ), Res.MSG_EXPECTING_LEFTPAREN_IF );
parseExpression( ContextType.pBOOLEAN_FALSE );
Expression e = popExpression();
// Bad assignment statement in if clause (mistaken for equality "==")
if( match( null, "=", SourceCodeTokenizer.TT_OPERATOR ) )
{
parseExpression();
popExpression();
verify( ifStmt, false, Res.MSG_ASSIGNMENT_IN_IF_STATEMENT );
}
verify( ifStmt, match( null, ')' ), Res.MSG_EXPECTING_RIGHTPAREN_IF );
getSymbolTable().pushScope();
boolean statementParsed = false;
try
{
_ctxInferenceMgr.pushLastCtx();
statementParsed = parseStatement();
}
finally
{
getSymbolTable().popScope();
_ctxInferenceMgr.popCtx( false );
}
verify( ifStmt, statementParsed, Res.MSG_EXPECTING_STATEMENT );
if( statementParsed )
{
Statement stmt = popStatement();
ifStmt.setExpression( e );
ifStmt.setStatement( stmt );
// Swallow a semicolon if necessary
match( null, ';' );
if( match( null, Keyword.KW_else ) )
{
getSymbolTable().pushScope();
boolean elseStmtParsed;
try
{
elseStmtParsed = parseStatement();
}
finally
{
getSymbolTable().popScope();
}
verify( ifStmt, elseStmtParsed, Res.MSG_EXPECTING_STATEMENT );
if( elseStmtParsed )
{
ifStmt.setElseStatement( popStatement() );
}
}
}
pushStatement( ifStmt );
}
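// Parses: using( <var declarations or expression> ) { ... } [finally { ... }]. Each declared var
// is made final and must be initialized, and the resource type must satisfy
// verifyTypeForUsingStatementPredicate() below.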
private void parseUsingStatement()
{
UsingStatement usingStmt = new UsingStatement();
verify( usingStmt, match( null, '(' ), Res.MSG_EXPECTING_LEFTPAREN_USING );
_symTable.pushScope();
try
{
parseVarStatementsInUsingStatement( usingStmt );
List<IVarStatement> varStatements = usingStmt.getVarStatements();
for(IVarStatement vs : varStatements)
{
((VarStatement)vs).setFinal(true);
verify( usingStmt, vs.getHasInitializer(), Res.MSG_VAR_MIGHT_NOT_HAVE_BEEN_INIT, vs.getSymbol().getName() );
}
if( usingStmt.getVarStatements().isEmpty() )
{
parseExpression();
Expression expr = popExpression();
usingStmt.setExpression( expr );
verifyTypeForUsingStatementPredicate( expr, expr.getType() );
}
verify( usingStmt, match( null, ')' ), Res.MSG_EXPECTING_RIGHTPAREN_USING );
if( verify( usingStmt, match( null, null, '{', true ), Res.MSG_EXPECTING_LEFTBRACE_STMTBLOCK ) )
{
parseStatement();
usingStmt.setStatement( popStatement() );
}
if( match( null, Keyword.KW_finally ))
{
if( verify( usingStmt, match( null, null, '{', true ), Res.MSG_EXPECTING_LEFTBRACE_STMTBLOCK ) )
{
parseStatement();
Statement finallyStmt = popStatement();
usingStmt.setFinallyStatement( finallyStmt );
}
}
pushStatement( usingStmt );
}
finally
{
_symTable.popScope();
}
}
private void parseVarStatementsInUsingStatement( UsingStatement usingStmt )
{
Token T = new Token();
List<IVarStatement> varStmts = new ArrayList<>();
int iOffset;
int iLineNum;
int iColumn;
do
{
iOffset = getTokenizer().getTokenStart();
iLineNum = getTokenizer().getLineNumber();
iColumn = getTokenizer().getTokenColumn();
if( match( null, Keyword.KW_var ) )
{
VarStatement varStmt = new VarStatement();
int iNameOffset = getTokenizer().getTokenStart();
if( verify( varStmt, match( T, SourceCodeTokenizer.TT_WORD ), Res.MSG_EXPECTING_IDENTIFIER_VAR ) )
{
varStmt.setNameOffset( iNameOffset, null );
}
else
{
T._strValue = null;
}
parseVarStatement( varStmt, T, false );
setLocation( iOffset, iLineNum, iColumn );
varStmt = (VarStatement)popStatement();
verifyTypeForUsingStatementPredicate( varStmt, varStmt.getType() );
varStmts.add( varStmt );
}
else
{
break;
}
}while( match( null, ',' ) );
if( varStmts.size() > 0 )
{
usingStmt.setVarStatements( varStmts );
// Add a synthetic noop-stmt to avoid "multiple stmts on same line" warning wrt
// the using stmt's var stmts and its body e.g., if the stmt body stmt list puts
// an opening brace on same line as vars
pushStatement( new NoOpStatement() );
setLocation( getTokenizer().getTokenStart(), iLineNum, getTokenizer().getTokenColumn(), true );
popStatement();
}
}
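// Verifies that the type used in a 'using' statement is acceptable: a Lock, Closeable, IReentrant,
// IDisposable or IMonitorLock, or a type that structurally provides dispose(), close(), or a
// lock()/unlock() pair.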
private void verifyTypeForUsingStatementPredicate( ParsedElement pe, IType type )
{
maybeRemoveIMonitorLockError( pe );
boolean bAssignableFromUsingType =
isAssignableFrom(JavaTypes.LOCK(), type) ||
isAssignableFrom(JavaTypes.getJreType(Closeable.class), type ) ||
isAssignableFrom(JavaTypes.getGosuType( IReentrant.class ), type ) ||
isAssignableFrom(GosuTypes.IDISPOSABLE(), type ) ||
type == GosuTypes.IMONITORLOCK() ||
type.getTypeInfo().getMethod( "dispose" ) != null ||
type.getTypeInfo().getMethod( "close" ) != null ||
(type.getTypeInfo().getMethod( "lock" ) != null && type.getTypeInfo().getMethod( "unlock" ) != null);
verify( pe, bAssignableFromUsingType, Res.MSG_BAD_TYPE_FOR_USING_STMT );
}
private boolean isAssignableFrom(IType type1, IType type2) {
return type1 != null && type1.isAssignableFrom( type2 );
}
private void maybeRemoveIMonitorLockError( ParsedElement pe )
{
if( pe instanceof TypeAsExpression )
{
ParseTree after = pe.getLocation().getChildAfter(((TypeAsExpression) pe).getLHS().getLocation());
if( after != null && after.getParsedElement() instanceof TypeLiteral )
{
//noinspection ThrowableResultOfMethodCallIgnored
after.getParsedElement().removeParseException(Res.MSG_IMONITOR_LOCK_SHOULD_ONLY_BE_USED_WITHIN_USING_STMTS);
}
}
else if( pe instanceof VarStatement )
{
maybeRemoveIMonitorLockError( ((VarStatement)pe).getAsExpression() );
}
else if( pe instanceof ParenthesizedExpression )
{
maybeRemoveIMonitorLockError( ((ParenthesizedExpression)pe).getExpression() );
}
}
private void parseStatementBlock()
{
parseStatementBlock( true );
}
private void parseStatementBlock( boolean bMatchClosingBrace )
{
parseStatementBlock( false, bMatchClosingBrace );
}
private void parseStatementBlock( boolean forceKeepStmtBlock, boolean bMatchClosingBrace )
{
_symTable.pushScope();
if( !bMatchClosingBrace )
{
decStatementDepth();
}
try
{
ArrayList<Statement> statements = new ArrayList<>();
parseStatementsAndDetectUnreachable( statements );
StatementList stmtList = new StatementList( _symTable );
Token closingBraceToken = bMatchClosingBrace ? new Token() : null;
verify( stmtList, !bMatchClosingBrace || match( closingBraceToken, '}' ), Res.MSG_EXPECTING_RIGHTBRACE_STMTBLOCK );
if( closingBraceToken != null )
{
stmtList.setLastLineNumber( closingBraceToken.getLine() );
}
stmtList.setStatements( statements );
boolean dontOptimizeStatementLists = forceKeepStmtBlock || isDontOptimizeStatementLists();
pushStatement( dontOptimizeStatementLists ? stmtList : stmtList.getSelfOrSingleStatement() );
}
finally
{
if( !bMatchClosingBrace )
{
incStatementDepth();
}
_symTable.popScope();
}
}
void parseNamespaceStatement()
{
if( isEditorParser() )
{
parseNamespaceStatement_editor();
}
else
{
parseNamespaceStatement_normal();
}
}
void parseNamespaceStatement_editor()
{
NamespaceStatement namespaceStmt = new NamespaceStatement();
parseExpression();
Expression namespace = popExpression();
//noinspection ThrowableResultOfMethodCallIgnored
namespace.clearParseExceptions(); //Res.MSG_EXPECTING_TYPE_TO_FOLLOW_PACKAGE_NAME );
IGosuClassInternal gsClass = (IGosuClassInternal)getScriptPart().getContainingType();
String strNamespace = namespace.toString();
verify( namespaceStmt, strNamespace.equals( gsClass.getNamespace() ), Res.MSG_WRONG_NAMESPACE, strNamespace, gsClass.getNamespace() );
setNamespace( strNamespace );
namespaceStmt.setNamespace( strNamespace );
pushStatement( namespaceStmt );
while( match( null, ';' ) )
{
//pushStatement( new NoOpStatement() );
}
}
void parseNamespaceStatement_normal()
{
NamespaceStatement namespaceStmt = new NamespaceStatement();
int mark = getTokenizer().mark();
String strToken = null;
if( verify( namespaceStmt, match( null, SourceCodeTokenizer.TT_WORD ), Res.MSG_EXPECTING_TYPELITERAL_OR_NAMESPACE ) )
{
strToken = getTokenizer().getTokenAt( mark ).getStringValue();
}
String strNamespace = parseDotPathWord( strToken );
strNamespace = strNamespace == null ? "" : strNamespace;
IGosuClassInternal gsClass = (IGosuClassInternal)getScriptPart().getContainingType();
verify( namespaceStmt, strNamespace.equals( gsClass.getNamespace() ), Res.MSG_WRONG_NAMESPACE, strNamespace, gsClass.getNamespace() );
setNamespace( strNamespace );
namespaceStmt.setNamespace( strNamespace );
pushStatement( namespaceStmt );
while( match( null, ';' ) )
{
//pushStatement( new NoOpStatement() );
}
}
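// Parses a run of consecutive 'uses' statements into a UsesStatementList, warning on duplicate
// imports and reporting an error when two imports conflict. Returns null if the current token is
// not 'uses'.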
@Override
public UsesStatementList parseUsesStatementList( boolean bResolveUsesTypes )
{
int iOffset = getTokenizer().getTokenStart();
int iLineNum = getTokenizer().getLineNumber();
int iColumn = getTokenizer().getTokenColumn();
int iUsesListOffset = iOffset;
int iUsesListLineNum = iLineNum;
if( match( null, Keyword.KW_uses, true ) )
{
List<IUsesStatement> usesList = new ArrayList<>();
UsesStatementList stmtList = new UsesStatementList();
stmtList.setUsesStatements( usesList );
while( match( null, Keyword.KW_uses ) )
{
getOwner().parseUsesStatement( bResolveUsesTypes );
setLocation( iOffset, iLineNum, iColumn );
UsesStatement stmt = (UsesStatement)popStatement();
//noinspection ThrowableResultOfMethodCallIgnored
stmt.removeParseWarningRecursively( Res.MSG_DEPRECATED_MEMBER ); // don't show these in uses statements
IUsesStatement duplicate = stmtList.conflicts( stmt );
if( duplicate != null )
{
if( warn( stmt, !duplicate.getTypeName().equals( stmt.getTypeName() ), Res.MSG_USES_STMT_DUPLICATE ) )
{
verify( stmt, false, Res.MSG_USES_STMT_CONFLICT, duplicate.getTypeName() );
}
}
usesList.add( stmt );
getOwner().checkInstruction( true );
iOffset = getTokenizer().getTokenStart();
iLineNum = getTokenizer().getLineNumber();
}
pushStatement( stmtList );
setLocation( iUsesListOffset, iUsesListLineNum, iColumn, true );
popStatement();
return stmtList;
}
return null;
}
void parseUsesStatement()
{
parseUsesStatement( true );
}
void parseUsesStatement( boolean bResolveTypes )
{
UsesStatement usesStmt = new UsesStatement();
parseTypeLiteral();
TypeLiteral typeLiteral = (TypeLiteral)peekExpression();
Token token = _tokenizer.getCurrentToken();
if( SourceCodeTokenizer.TT_OPERATOR == token.getType() && "#".equals( token.getStringValue() ) )
{
int mark = _tokenizer.mark();
_tokenizer.nextToken();
Token nextToken = _tokenizer.getCurrentToken();
if( SourceCodeTokenizer.TT_OPERATOR == nextToken.getType() && "*".equals( nextToken.getStringValue() ) )
{
_tokenizer.nextToken();
processUsesStatementFeatureLiteral( bResolveTypes, usesStmt, typeLiteral, null );
}
else
{
IType type = typeLiteral.getType().getType();
if( type != null && !(type instanceof IGosuClass) )
{
IGosuClassInternal gsClass = IGosuClassInternal.Util.getGosuClassFrom( type );
if( gsClass != null )
{
typeLiteral.setType( gsClass );
}
}
_tokenizer.restoreToMark( mark );
parseFeatureLiteral( token, typeLiteral );
FeatureLiteral fl = (FeatureLiteral)popExpression();
processUsesStatementFeatureLiteral( bResolveTypes, usesStmt, typeLiteral, fl );
}
}
else
{
popExpression();
processUsesStatementTypeLiteral( bResolveTypes, usesStmt, typeLiteral );
}
}
private void processUsesStatementFeatureLiteral( boolean bResolveTypes, UsesStatement usesStmt, TypeLiteral typeLiteral, FeatureLiteral fl )
{
String t = typeLiteral.getType().getType() instanceof ErrorType && typeLiteral.getPackageExpression() != null
? typeLiteral.getPackageExpression().toString()
: TypeLord.getPureGenericType( typeLiteral.getType().getType() ).getName();
boolean bForwardRefToInnerClass = getGosuClass() instanceof IGosuClassInternal && t != null && t.startsWith( getGosuClass().getName() );
verify( usesStmt, t == null || !t.endsWith( "]" ), Res.MSG_BAD_NAMESPACE, t );
if( !bForwardRefToInnerClass || ((IGosuClassInternal)getGosuClass()).isHeaderCompiled() )
{
IType type = typeLiteral.getType().getType();
IGosuClass gsType = IGosuClassInternal.Util.getGosuClassFrom( type );
verify( usesStmt, gsType != null, Res.MSG_ONLY_GOSU_JAVA_TYPES );
if( fl == null )
{
usesStmt.setTypeName( TypeLord.getPureGenericType( typeLiteral.getType().getType() ).getName() );
usesStmt.setFeatureSpace( true );
if( gsType != null )
{
getTypeUsesMap().addToTypeUses( usesStmt );
}
}
else
{
if( bResolveTypes )
{
String strTypeName = TypeLord.getPureGenericType( typeLiteral.getType().getType() ).getName();
usesStmt.setTypeName( strTypeName );
if( typeLiteral.hasParseExceptions() )
{
IParseIssue first = typeLiteral.getParseExceptions().get( 0 );
usesStmt.addParseException( first );
//noinspection ThrowableResultOfMethodCallIgnored
typeLiteral.removeParseException( first.getMessageKey() );
}
else if( fl.hasParseExceptions() )
{
IFeatureInfo feature = fl.getFeature();
List<IAttributedFeatureInfo> features = Collections.emptyList();
if( feature != null )
{
features = getAllStaticFeatures( gsType, feature.getName() );
}
if( !features.isEmpty() )
{
usesStmt.setFeatureSpace( true );
for( IAttributedFeatureInfo f : features )
{
UsesStatement stmt = new UsesStatement();
stmt.setTypeName( strTypeName );
processUsesStatement( stmt, typeLiteral, f, gsType );
}
}
else
{
IParseIssue first = fl.getParseExceptions().get( 0 );
usesStmt.addParseException( first );
//noinspection ThrowableResultOfMethodCallIgnored
fl.removeParseException( first.getMessageKey() );
}
}
else if( verify( usesStmt, fl.isStaticish() && !fl.isConstructorLiteral(), Res.MSG_CANNOT_REFERENCE_NON_STATIC_FEATURE_HERE ) )
{
processUsesStatement(usesStmt, typeLiteral, fl.getFeature(), gsType);
}
}
else
{
IFeatureInfo feature = fl.getFeature();
usesStmt.setTypeName( t );
usesStmt.setFeatureInfo( feature );
if( gsType != null && feature != null && !(feature.getOwnersType() instanceof ErrorType))
{
getTypeUsesMap().addToTypeUses( usesStmt );
}
}
}
}
pushStatement( usesStmt );
while( match( null, ';' ) )
{
//pushStatement( new NoOpStatement() );
}
}
private void processUsesStatement( UsesStatement usesStmt, TypeLiteral typeLiteral, IFeatureInfo fi, IGosuClass gsType )
{
usesStmt.setFeatureInfo( fi );
if( gsType != null )
{
getTypeUsesMap().addToTypeUses( usesStmt );
}
ICompilableTypeInternal gsClass = getGosuClass();
if( gsClass != null )
{
verify( typeLiteral, !typeLiteral.getType().getType().getRelativeName().equals( gsClass.getRelativeName() ),
Res.MSG_SAME_NAME_AS_CLASS, gsClass.getRelativeName() );
}
}
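  // Collects all static methods and properties on gsType having the given name.
  // Used to resolve a feature import (uses Some.Type#member) when the feature
  // literal itself did not parse cleanly.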
private List<IAttributedFeatureInfo> getAllStaticFeatures( IGosuClass gsType, String name )
{
List<IAttributedFeatureInfo> res = new ArrayList<>();
if( name == null || gsType == null )
{
return res;
}
IGosuClassTypeInfo typeInfo = gsType.getTypeInfo();
DynamicArray<? extends IMethodInfo> methods = typeInfo.getMethods( getGosuClass() ).getMethods( name );
for( IMethodInfo m : methods )
{
if( m.isStatic() )
{
res.add( m );
}
}
List<? extends IPropertyInfo> properties = typeInfo.getProperties( getGosuClass() );
for( IPropertyInfo p : properties )
{
if( name.equals( p.getName() ) && p.isStatic() )
{
res.add( p );
}
}
return res;
}
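  // Handles a plain type or namespace import, i.e. 'uses foo.bar.Baz' or the
  // wildcard form 'uses foo.bar.*', adding the UsesStatement to the type-uses
  // map when it resolves.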
private void processUsesStatementTypeLiteral( boolean bResolveTypes, UsesStatement usesStmt, TypeLiteral typeLiteral )
{
String t = typeLiteral.getType().getType() instanceof ErrorType && typeLiteral.getPackageExpression() != null
? typeLiteral.getPackageExpression().toString()
: TypeLord.getPureGenericType( typeLiteral.getType().getType() ).getName();
boolean bForwardRefToInnerClass = getGosuClass() instanceof IGosuClassInternal && t != null && t.startsWith( getGosuClass().getName() );
verify( usesStmt, t == null || !t.endsWith( "]" ), Res.MSG_BAD_NAMESPACE, t );
if( !bForwardRefToInnerClass || ((IGosuClassInternal)getGosuClass()).isHeaderCompiled() )
{
if( t.endsWith( "*" ) && match( null, "*", SourceCodeTokenizer.TT_OPERATOR ) )
{
typeLiteral.clearParseExceptions();
usesStmt.setTypeName( t );
if( verify( usesStmt, t.endsWith( ".*" ), Res.MSG_BAD_NAMESPACE, t ) )
{
String namespace = t.substring( 0, t.length() - 2 );
IType type = TypeSystem.getNamespace( namespace );
if( type == null )
{
type = TypeSystem.getByFullNameIfValid( namespace );
}
verify( usesStmt, type != null, Res.MSG_BAD_NAMESPACE, namespace );
}
getTypeUsesMap().addToTypeUses( usesStmt );
}
else
{
if( bResolveTypes )
{
String strTypeName = TypeLord.getPureGenericType( typeLiteral.getType().getType() ).getName();
usesStmt.setTypeName( strTypeName );
if( typeLiteral.hasParseExceptions() )
{
IParseIssue first = typeLiteral.getParseExceptions().get( 0 );
usesStmt.addParseException( first );
//noinspection ThrowableResultOfMethodCallIgnored
typeLiteral.removeParseException( first.getMessageKey() );
}
else
{
getTypeUsesMap().addToTypeUses( usesStmt );
ICompilableTypeInternal gsClass = getGosuClass();
if( gsClass != null )
{
verify( typeLiteral, !typeLiteral.getType().getType().getRelativeName().equals( gsClass.getRelativeName() ),
Res.MSG_SAME_NAME_AS_CLASS, gsClass.getRelativeName() );
}
}
}
else
{
usesStmt.setTypeName( t );
getTypeUsesMap().addToTypeUses( usesStmt );
}
}
}
pushStatement( usesStmt );
while( match( null, ';' ) )
{
//pushStatement( new NoOpStatement() );
}
}
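  // case-clauses
  //   case <expression> : [ <statement-list> ] [ <case-clauses> ]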
void parseCaseClauses( SwitchStatement switchStmt )
{
List<CaseClause> cases = new ArrayList<>();
while( parseCaseClause( switchStmt, cases ) )
{ /* do nothing */ }
switchStmt.setCases( cases.toArray( new CaseClause[cases.size()] ) );
}
boolean parseCaseClause( SwitchStatement switchStmt, List<CaseClause> cases )
{
int iOffset = getTokenizer().getTokenStart();
int iLine = getTokenizer().getLineNumber();
int iColumn = getTokenizer().getTokenColumn();
if( !match( null, Keyword.KW_case ) )
{
return false;
}
if( !cases.isEmpty() )
{
warnIfCaseNotTerminated( cases.get( cases.size()-1 ).getStatements() );
}
Expression switchExpr = switchStmt.getSwitchExpression();
parseExpression( new ContextType( switchExpr.getType() ) );
verify( switchStmt, match( null, ":", SourceCodeTokenizer.TT_OPERATOR ), Res.MSG_EXPECTING_CASE_COLON );
Expression e = popExpression();
verifyCaseIsUnique( e, cases );
boolean typeInferred = switchExpr instanceof TypeOfExpression && e instanceof TypeLiteral && isIsolatedCase( cases );
List<Statement> statements = new ArrayList<>();
CaseClause caseClause = new CaseClause( e, statements );
cases.add( caseClause );
if( typeInferred )
{
IType caseExprType = (IType)e.evaluate();
Expression typeOfExpr = ((TypeOfExpression)switchExpr).getExpression();
typeInferred = checkComparableAndCastable( typeOfExpr, e );
if( verify( e, typeInferred, Res.MSG_TYPE_MISMATCH, typeOfExpr.getType().getDisplayName(), caseExprType.getDisplayName() ) )
{
TypeOfExpression toe = (TypeOfExpression)switchExpr;
_ctxInferenceMgr.pushCtx();
_ctxInferenceMgr.updateType( toe.getExpression(), caseExprType );
}
}
_symTable.pushScope();
boolean typeInferenceCancelled = false;
try
{
for( Statement stmt = null; true; )
{
// must cancel inference before the next case or default clause is parsed
if( match( null, Keyword.KW_case, true ) ||
match( null, Keyword.KW_default, true ) )
{
if( typeInferred )
{
_ctxInferenceMgr.popCtx( false );
typeInferenceCancelled = true;
}
break;
}
if( !parseStatement() )
{
break;
}
stmt = popStatementAndDetectUnreachable( stmt, statements );
}
}
finally
{
_symTable.popScope();
if( typeInferred && !typeInferenceCancelled )
{
_ctxInferenceMgr.popCtx( false );
}
}
pushExpression( caseClause );
setLocation( iOffset, iLine, iColumn, true );
popExpression();
return true;
}
private void verifyCaseIsUnique( Expression e, List<CaseClause> cases )
{
if( e.getType() instanceof IErrorType || !e.isCompileTimeConstant() && !(e instanceof Literal) )
{
if( e instanceof ImplicitTypeAsExpression )
{
verifyCaseIsUnique( ((ImplicitTypeAsExpression)e).getLHS(), cases );
}
return; // Can't verify this
}
Object value;
try {
value = e.evaluate();
if( value == null && e instanceof TypeAsExpression ) {
// sometimes a coercer isn't available in a compile-time environment, so bail if null
return;
}
}
catch( Exception err ) {
return;
}
for( CaseClause cc: cases )
{
Expression expr = cc.getExpression();
if( expr instanceof ImplicitTypeAsExpression )
{
expr = ((ImplicitTypeAsExpression)expr).getLHS();
}
if( expr != null && !expr.hasParseExceptions() && (expr.isCompileTimeConstant() || expr instanceof Literal) ) {
Object csr;
try {
csr = expr.evaluate();
}
catch( Exception err ) {
// skip over evaluation errors
continue;
}
verify( e,
!GosuObjectUtil.equals( csr, value ),
Res.MSG_DUPLICATE_CASE_EXPRESSION );
}
}
}
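  // Returns true if the previous case clause (if any) ends in a terminal statement,
  // i.e. control cannot fall through into the clause being parsed. Used to decide
  // whether 'case <type-literal> :' may narrow the type of the operand of a typeof
  // switch expression.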
private boolean isIsolatedCase( List<CaseClause> cases )
{
if( cases.isEmpty() )
{
return true;
}
else
{
CaseClause caseClause = cases.get( cases.size() - 1 );
List<Statement> statements = caseClause.getStatements();
int i;
      for( i = statements.size()-1; i >= 0 && statements.get( i ) instanceof NoOpStatement; i-- )
{ /* Loop until we find the index of the last non-noop statement */ }
if( i >= 0 )
{
boolean[] bAbsolute = {false};
return statements.get( i ).getLeastSignificantTerminalStatement( bAbsolute ) != null && bAbsolute[0];
}
}
return false;
}
private void warnIfCaseNotTerminated( List<Statement> statements )
{
int i;
    for( i = statements.size()-1; i >= 0 && statements.get( i ) instanceof NoOpStatement; i-- )
{ /* Loop until we find the index of the last non-noop statement */ }
if( i >= 0 )
{
Statement lastStmt = statements.get( i );
boolean[] bAbsolute = {false};
verifyOrWarn( lastStmt, lastStmt.hasParseExceptions() || lastStmt.getLeastSignificantTerminalStatement( bAbsolute ) != null && bAbsolute[0],
true, Res.MSG_NONTERMINAL_CASE_CLAUSE );
}
}
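  // default-clause
  //   default : [ <statement-list> ]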
boolean parseDefaultClause( SwitchStatement switchStmt, List<CaseClause> cases )
{
if( !match( null, Keyword.KW_default ) )
{
return false;
}
if( cases.size() > 0 )
{
warnIfCaseNotTerminated( cases.get( cases.size()-1 ).getStatements() );
}
verify( switchStmt, match( null, ":", SourceCodeTokenizer.TT_OPERATOR ), Res.MSG_EXPECTING_CASE_COLON );
verify( switchStmt, switchStmt.getDefaultStatements() == null, Res.MSG_MULTIPLE_DEFAULT_CLAUSES_NOT_PERMITTED );
_symTable.pushScope();
try
{
List<Statement> defaultStatements = new ArrayList<>();
parseStatementsAndDetectUnreachable( defaultStatements );
switchStmt.setDefaultStatements( defaultStatements );
}
finally
{
_symTable.popScope();
}
return true;
}
void checkInstruction( boolean bProcessDirectives )
{
// If the tokenizer has an 'instructor', it may be in a state where
// it needs to analyze tokens in a separate context e.g., in a template
    // an expression of the form <%=foo.bar%>. Thus "analyze separately"
// means to parse these as expressions and throw them away. Note we can
// throw them away because instructors are indicative of pure analysis
// of the source; the object code is of no interest e.g., a template
// verifier may use an instructor.
checkUnexpectedEof();
while( !_tokenizer.isEOF() && (_tokenizer.isAnalyzingSeparately() || _tokenizer.isAnalyzingDirective()) )
{
if( _tokenizer.isAnalyzingDirective() )
{
parseDirective( bProcessDirectives );
}
else if( !(getGosuClass() instanceof IGosuClass) ||
!((CompilationState)((IGosuClass)getGosuClass()).getCompilationState()).isReparsingHeader() )
{
// Only eat <%= xx %> expressions *after* the header is parsed during definition phase.
// They need to be parsed within the program evaluate() body parsing phase, which includes class members etc.
int iOffset = _tokenizer.getTokenStart();
parseExpression();
Expression e = popExpression();
clearExpressionInTemplateUnlessParsingEvaluateFunctionBody( e );
if( iOffset == _tokenizer.getTokenStart() )
{
break;
}
}
else
{
break;
}
}
}
private void checkUnexpectedEof()
{
if( _tokenizer.isEOF() && _tokenizer.isAnalyzingSeparately() )
{
if( getGosuClass() != null )
{
((ParsedElement)getGosuClass().getClassStatement()).addParseException( makeFullParserState(), Res.MSG_UNEXPECTED_EOF );
}
}
}
private void clearExpressionInTemplateUnlessParsingEvaluateFunctionBody( Expression e )
{
// Retain the parse tree (location) for template expressions when we are parsing the
// template as a class and *only* during the phase where we are parsing the program
// entry point aka the evaluate() function. Otherwise throw out the parse tree.
if( !isParsingProgram() &&
getGosuClass() instanceof IGosuClass &&
(((IGosuClass)getGosuClass()).getCompilationState().isDeclarationsCompiled() &&
!((CompilationState)((IGosuClass)getGosuClass()).getCompilationState()).isReparsingHeader()) )
{
if( !isParsingFunction() || (getProgramEntryPointDfs() != null && !peekParsingFunction().equals( getProgramEntryPointDfs().getType() )) )
{
e.clearParseExceptions();
removeLocation( e.getLocation() );
removeInnerClasses( e );
}
}
}
private void removeInnerClasses( IParsedElement e ) {
if( e == null ) {
return;
}
if( e instanceof BlockExpression ) {
IBlockClass blockGosuClass = ((BlockExpression)e).getBlockGosuClass();
if( blockGosuClass != null && blockGosuClass.getEnclosingType() != null ) {
ICompilableTypeInternal enclosingType = (ICompilableTypeInternal)blockGosuClass.getEnclosingType();
enclosingType.removeBlock( blockGosuClass );
}
}
else if( e instanceof NewExpression && ((NewExpression)e).isAnonymousClass() ) {
IGosuClassInternal anon = (IGosuClassInternal)((Expression)e).getType();
if( anon != null ) {
((IGosuClassInternal)anon.getEnclosingType()).removeInnerClass( anon );
}
}
IParseTree location = e.getLocation();
if( location != null ) {
for( IParseTree pt: location.getChildren() ) {
removeInnerClasses( pt.getParsedElement() );
}
}
}
private void removeLocation( ParseTree location )
{
if( location.getParent() != null )
{
location.getParent().removeChild( location );
}
getLocationsList().remove( location );
}
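  // Parses a single template directive: either 'extends <type-literal>', which makes
  // the supertype's non-private members (and any enhancement members) visible to the
  // template, or 'params( <parameter-declaration-list> )', which declares template
  // parameters. Anything else is flagged as a bad directive.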
private void parseDirective( boolean processDirectives )
{
final Token token = _tokenizer.getCurrentToken();
int iOffset = token.getTokenStart();
int iLineNum = token.getLine();
int iColumn = token.getTokenColumn();
DirectiveExpression e = new DirectiveExpression();
if( Keyword.KW_extends == token.getKeyword() )
{
_tokenizer.nextToken();
parseTypeLiteral();
Expression typeLiteral = peekExpression();
if( typeLiteral instanceof TypeLiteral )
{
IType extendsType = ((TypeLiteral)typeLiteral).getType().getType();
if( extendsType instanceof IGosuClassInternal )
{
IGosuClassInternal supertype = (IGosuClassInternal)extendsType;
supertype.putClassMembers( this, _symTable, supertype, true );
List<? extends GosuClassTypeLoader> typeLoaders = TypeSystem.getCurrentModule().getTypeLoaders( GosuClassTypeLoader.class );
for( GosuClassTypeLoader typeLoader : typeLoaders )
{
List<? extends IGosuEnhancement> enhancementsForType = typeLoader.getEnhancementIndex().getEnhancementsForType( supertype );
for( IGosuEnhancement enhancement : enhancementsForType )
{
if( enhancement instanceof IGosuEnhancementInternal )
{
((IGosuEnhancementInternal)enhancement).putClassMembers( this, _symTable, supertype, true );
}
}
}
for( Object entryObj : _symTable.getSymbols().entrySet() )
{
//noinspection unchecked
Map.Entry<CharSequence, ISymbol> entry = (Map.Entry<CharSequence, ISymbol>)entryObj;
if( entry.getValue().isPrivate() )
{
_symTable.removeSymbol( entry.getKey() );
}
}
}
}
}
else if( "params".equals( token.getStringValue() ) )
{
_tokenizer.nextToken();
verify( e, match( null, '(' ), Res.MSG_EXPECTING_LEFTPAREN_FUNCTION_DEF );
ArrayList<ISymbol> params = parseParameterDeclarationList( e, false, null );
if( processDirectives )
{
for( ISymbol param : params )
{
getSymbolTable().putSymbol( param );
}
}
verify( e, match( null, ')' ), Res.MSG_EXPECTING_RIGHTPAREN_FUNCTION_DEF );
}
else
{
advanceToNextTokenSilently();
e.addParseException( new ParseException( makeFullParserState(), Res.MSG_BAD_TEMPLATE_DIRECTIVE ) );
}
pushExpression( e );
setLocation( iOffset, iLineNum, iColumn );
popExpression();
}
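  // assignment-or-method-call-statement
  //   <primary-expression> [ <assignment-op> <expression> ]
  // Covers new/method/block-invocation statements as well as identifier, member,
  // array and map assignments, e.g. x += 1, foo.Bar = y, list[0] = z.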
boolean parseAssignmentOrMethodCall()
{
boolean bRet = true;
Token token = getTokenizer().getCurrentToken();
switch( token.getType() )
{
case SourceCodeTokenizer.TT_KEYWORD:
case SourceCodeTokenizer.TT_WORD:
case '(':
case '"':
break;
default:
return false;
}
int initialMark = _tokenizer.mark();
parsePrimaryExpression();
Expression e = popExpression();
if( e instanceof NewExpression )
{
// New Statement
NewStatement stmt = new NewStatement();
stmt.setNewExpression( (NewExpression)e );
pushStatement( stmt );
}
else if( e instanceof MethodCallExpression )
{
// Method Call Statement
MethodCallStatement stmt = new MethodCallStatement();
stmt.setMethodCall( (MethodCallExpression)e );
pushStatement( stmt );
//noinspection ThrowableResultOfMethodCallIgnored
e.removeParseException( Res.MSG_VOID_EXPRESSION_NOT_ALLOWED );
}
else if( e instanceof BeanMethodCallExpression )
{
// Bean Method Call Statement
BeanMethodCallStatement stmt = new BeanMethodCallStatement();
stmt.setBeanMethodCall( (BeanMethodCallExpression)e );
pushStatement( stmt );
//noinspection ThrowableResultOfMethodCallIgnored
e.removeParseException(Res.MSG_VOID_EXPRESSION_NOT_ALLOWED);
}
else if( e instanceof IBlockInvocation )
{
pushStatement( new BlockInvocationStatement( (IBlockInvocation)e ) );
//noinspection ThrowableResultOfMethodCallIgnored
e.removeParseException(Res.MSG_VOID_EXPRESSION_NOT_ALLOWED);
}
else if( e instanceof Identifier ||
(e instanceof ImplicitTypeAsExpression && ((ImplicitTypeAsExpression)e).getLHS() instanceof Identifier) )
{
//noinspection ThrowableResultOfMethodCallIgnored
e.removeParseException( Res.MSG_CLASS_PROPERTY_NOT_READABLE );
      // Assignment Statement
parseAssignment( e, matchAssignmentOperator() );
}
else if( e instanceof ParenthesizedExpression )
{
NotAStatement nas = new NotAStatement();
nas.setExpression( e );
verify( nas, false, Res.MSG_EXPECTING_OPERATOR_TO_FOLLOW_EXPRESSION );
pushStatement( nas );
}
else if( e instanceof SynthesizedMemberAccess )
{
SyntheticMemberAccessStatement stmt = new SyntheticMemberAccessStatement((ISynthesizedMemberAccessExpression)e);
pushStatement( stmt );
}
else if( e instanceof MemberAccess ||
(e instanceof ImplicitTypeAsExpression && ((ImplicitTypeAsExpression)e).getLHS() instanceof MemberAccess))
{
//noinspection ThrowableResultOfMethodCallIgnored
e.removeParseException( Res.MSG_CLASS_PROPERTY_NOT_READABLE );
// Member Assignment Statement
MemberAssignmentStatement as = new MemberAssignmentStatement();
MemberAccess ma;
if( e instanceof ImplicitTypeAsExpression )
{
ma = (MemberAccess)((ImplicitTypeAsExpression)e).getLHS();
IParsedElement parent = ContextInferenceManager.unwrapImplicitTypeAs( ((ImplicitTypeAsExpression)e).getLHS() );
_locations.remove( e.getLocation() );
if( parent == null )
{
_locations.add( ma.getLocation() );
}
}
else
{
ma = (MemberAccess)e;
}
// This tests the validity of the access list
IType typeExpected = ma.getAssignableType();
String assignOp = matchAssignmentOperator();
if( verify( as, assignOp != null, Res.MSG_EXPECTING_EQUALS_ASSIGN ) )
{
try
{
// The only case this can be null is for associative array access.
if (ma.getMemberName() != null) {
verifyPropertyWritable( ma.getRootType(), ma.getMemberName(), false );
}
}
catch( Exception ex )
{
//noinspection ThrowableResultOfMethodCallIgnored
ma.addParseException( ParseException.wrap( ex, makeFullParserState() ) );
}
Expression rhs = parseAssignmentRhs( assignOp, typeExpected, e );
_ctxInferenceMgr.cancelInferences( ma, rhs );
typeExpected = ma.getAssignableType(); //update type in case an inference was cancelled
if( !(typeExpected instanceof ErrorType) )
{
IPropertyInfo lhsPi = ma.getPropertyInfo();
if( lhsPi instanceof IJavaPropertyInfo &&
((IJavaPropertyInfo)lhsPi).getWriteMethodInfo() == null &&
((IJavaPropertyInfo)lhsPi).getPublicField() != null )
{
typeExpected = TypeSystem.get(((IJavaPropertyInfo) lhsPi).getPublicField().getType());
}
}
verifyComparable( typeExpected, rhs );
rhs = buildRhsOfCompoundOperator( e, assignOp, rhs );
if( rhs.hasParseExceptions() )
{
rhs.getParseExceptions().get( 0 ).setExpectedType( typeExpected );
}
rhs = possiblyWrapWithImplicitCoercion(rhs, typeExpected);
as.setExpression( rhs );
as.setCompoundStatement( !"=".equals( assignOp ) );
}
//noinspection ThrowableResultOfMethodCallIgnored
ma.removeParseException( Res.MSG_CANNOT_READ_A_WRITE_ONLY_PROPERTY );
as.setRootExpression( ma.getRootExpression() );
as.setMemberName( ma.getMemberName() );
as.setMemberExpression( ma.getMemberExpression() );
as.setMemberAccess( ma );
pushStatement( as );
}
else if( e instanceof ArrayAccess )
{
// Array Assignment Statement
Statement statement;
ArrayAccess aa = (ArrayAccess)e;
IType typeExpected = aa.getComponentType();
ArrayAssignmentStatement as = new ArrayAssignmentStatement();
String assignOp = matchAssignmentOperator();
if( verify( as, assignOp != null, Res.MSG_EXPECTING_EQUALS_ASSIGN ) )
{
IType type = aa.getRootExpression().getType();
verify( as, type != JavaTypes.STRING() && (!JavaTypes.CHAR_SEQUENCE().isAssignableFrom(type) ||
JavaTypes.STRING_BUILDER().isAssignableFrom(type) ||
JavaTypes.STRING_BUFFER().isAssignableFrom(type)), Res.MSG_STR_IMMUTABLE );
Expression rhs = parseAssignmentRhs( assignOp, typeExpected, aa );
verifyComparable( typeExpected, rhs );
rhs = buildRhsOfCompoundOperator( e, assignOp, rhs );
rhs = possiblyWrapWithImplicitCoercion( rhs, typeExpected );
as.setExpression( rhs );
as.setArrayAccessExpression( aa );
        as.setCompoundStatement( !"=".equals( assignOp ) );
statement = as;
}
else
{
NotAStatement nas = new NotAStatement();
statement = nas;
nas.setExpression( aa );
verify( nas, false, Res.MSG_NOT_A_STATEMENT );
}
pushStatement( statement );
}
else if( e instanceof MapAccess )
{
      // Map Assignment Statement
Statement statement;
MapAccess ma = (MapAccess)e;
IType typeExpected = ma.getComponentType();
MapAssignmentStatement as = new MapAssignmentStatement();
String assignOp = matchAssignmentOperator();
if( verify( as, assignOp != null, Res.MSG_EXPECTING_EQUALS_ASSIGN ) )
{
Expression rhs = parseAssignmentRhs( assignOp, typeExpected, e );
verifyComparable( typeExpected, rhs );
rhs = buildRhsOfCompoundOperator( e, assignOp, rhs );
rhs = possiblyWrapWithImplicitCoercion( rhs, ma.getComponentType() );
as.setExpression( rhs );
as.setMapAccessExpression( ma );
        as.setCompoundStatement( !"=".equals( assignOp ) );
statement = as;
}
else
{
NotAStatement nas = new NotAStatement();
statement = nas;
nas.setExpression( ma );
verify( nas, false, Res.MSG_NOT_A_STATEMENT );
}
pushStatement( statement );
}
else if( e instanceof FeatureLiteral )
{
NotAStatement nas = new NotAStatement();
nas.setExpression( e );
verify( nas, false, Res.MSG_NOT_A_STATEMENT );
pushStatement( nas );
}
else
{
_tokenizer.restoreToMark( initialMark );
_locations.remove( e.getLocation() );
bRet = false;
}
return bRet;
}
private void parseAssignment( Expression e, String assignOp )
{
Identifier id;
Statement statement;
if( assignOp != null )
{
if( e instanceof ImplicitTypeAsExpression )
{
id = (Identifier)((ImplicitTypeAsExpression)e).getLHS();
IParsedElement parent = ContextInferenceManager.unwrapImplicitTypeAs( ((ImplicitTypeAsExpression)e).getLHS() );
_locations.remove( e.getLocation() );
if( parent == null )
{
_locations.add( id.getLocation() );
}
}
else
{
id = (Identifier)e;
}
AssignmentStatement as = new AssignmentStatement();
statement = as;
as.setIdentifier( id );
IType expectedType = id.getAssignableType();
boolean incrOrDecr = "++".equals( assignOp ) || "--".equals( assignOp );
Expression rhs = parseAssignmentRhs( assignOp, expectedType, e );
rhs = buildRhsOfCompoundOperator( e, assignOp, rhs );
if( rhs.hasParseExceptions() )
{
rhs.getParseExceptions().get( 0 ).setExpectedType( expectedType );
}
ISymbol idSym = id.getSymbol();
verify( as, idSym.isWritable() ||
(idSym.isFinal() && !idSym.isStatic() && !(idSym instanceof CapturedSymbol) &&
((idSym.isLocal() && !((Symbol)idSym).isImplicitlyInitialized()) ||
(isParsingConstructor() || isParsingProgramEvaluateMethod()) && isSymbolInScopeDirectly( idSym ))), // initializing final var is ok
Res.MSG_PROPERTY_NOT_WRITABLE, idSym.getDisplayName() );
if( id instanceof PropertyAccessIdentifier )
{
DynamicPropertySymbol symbol = (DynamicPropertySymbol)idSym;
DynamicFunctionSymbol setter = symbol.getSetterDfs();
if(setter != null && setter.getScriptPart() != null) {
IType settersOwningType = setter.getScriptPart().getContainingType();
if( settersOwningType instanceof IGosuClassInternal )
{
IGosuClassInternal settersOwningClass = (IGosuClassInternal) settersOwningType;
ICompilableTypeInternal ctxType = getGosuClass();
if( ctxType instanceof IGosuClassInternal )
{
IGosuClassInternal ctxClass = (IGosuClassInternal)ctxType;
if( !settersOwningClass.isAccessible( ctxClass, setter ) )
{
id.addParseException( Res.MSG_PROPERTY_NOT_VISIBLE, idSym.getDisplayName() );
}
}
}
}
}
if( rhs instanceof Identifier )
{
Identifier identifier = (Identifier)rhs;
if( idSym != null && idSym.equals( identifier.getSymbol() ) )
{
as.addParseWarning( new ParseWarning( makeFullParserState(), Res.MSG_SILLY_ASSIGNMENT, idSym.getName() ) );
}
if( identifier.getSymbol() instanceof DynamicPropertySymbol )
{
DynamicPropertySymbol dps = (DynamicPropertySymbol) identifier.getSymbol();
if( dps.getVarIdentifier() != null && dps.getVarIdentifier().equals( idSym.getName() ) )
{
as.addParseWarning( new ParseWarning( makeFullParserState(), Res.MSG_SILLY_ASSIGNMENT, idSym.getName() ) );
}
}
}
//noinspection ThrowableResultOfMethodCallIgnored
e.removeParseException( Res.MSG_CANNOT_READ_A_WRITE_ONLY_PROPERTY );
_ctxInferenceMgr.cancelInferences( id, rhs );
if(!incrOrDecr || !isPrimitiveOrBoxedIntegerType( rhs.getType() ) || !isPrimitiveOrBoxedIntegerType( expectedType ) )
{
verifyComparable( expectedType, rhs );
}
rhs = possiblyWrapWithImplicitCoercion( rhs, expectedType );
as.setExpression( rhs );
}
else
{
NotAStatement nas = new NotAStatement();
statement = nas;
nas.setExpression( e );
verify( nas, false, Res.MSG_NOT_A_STATEMENT );
}
pushStatement( statement );
}
private boolean isParsingProgramEvaluateMethod() {
return isParsingFunction() && getProgramEntryPointDfs() != null &&
peekParsingFunction().equals( getProgramEntryPointDfs().getType() );
}
private boolean isSymbolInScopeDirectly( ISymbol idSym ) {
if( !(idSym instanceof DynamicSymbol) ) {
return true;
}
DynamicSymbol fieldSym = (DynamicSymbol)idSym;
IGosuClassInternal declaringClass = fieldSym.getGosuClass();
return getGosuClass() == declaringClass && !isParsingBlock();
}
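  // Parses the right-hand side of an assignment. For '++' and '--' there is no rhs
  // in source, so a synthetic 'lhs + 1' / 'lhs - 1' additive expression is fabricated.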
private Expression parseAssignmentRhs( String operation, IType typeExpected, Expression lhs )
{
Expression rhs;
if( "++".equals( operation ) ||
"--".equals( operation ) )
{
AdditiveExpression add = new AdditiveExpression();
add.setLHS( lhs );
Expression one = new NumericLiteral( "1", 1, JavaTypes.pINT() );
IType type = resolveTypeForArithmeticExpression( lhs, lhs.getType(), operation, lhs.getType());
pushExpression( one );
setLocation( lhs.getLocation().getExtent() + 1, lhs.getLineNum(), lhs.getLocation().getColumn() + 1 );
popExpression();
one = possiblyWrapWithImplicitCoercion(one, type);
add.setRHS( one );
add.setOperator( "++".equals( operation ) ? "+" : "-" );
add.setType( type);
pushExpression( add );
setLocation( lhs.getLocation().getOffset(), lhs.getLineNum(), lhs.getLocation().getColumn()+1 );
popExpression();
rhs = add;
}
else
{
parseExpression( new ContextType( typeExpected ), false );
rhs = popExpression();
detectLikelyJavaCast( rhs );
}
return rhs;
}
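  // For a compound assignment (+=, -=, *=, /=, %=, &=, &&=, ^=, |=, ||=, <<=, >>=, >>>=)
  // wraps the parsed rhs in a synthetic binary expression whose lhs is the assignment
  // target, so 'a += b' is effectively stored as 'a = a + b'.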
private Expression buildRhsOfCompoundOperator( Expression lhs, String assignOp, Expression rhs )
{
Expression synthetic = null;
if( "+=".equals( assignOp ) || "-=".equals( assignOp ) )
{
AdditiveExpression add = new AdditiveExpression();
add.setLHS( lhs );
add.setRHS( rhs );
add.setOperator( assignOp.charAt( 0 ) == '+' ? "+" : "-" );
add.setType( resolveTypeForArithmeticExpression( add, lhs.getType(), assignOp, rhs.getType() ) );
synthetic = add;
}
else if( "*=".equals( assignOp ) || "/=".equals( assignOp ) || "%=".equals( assignOp ) )
{
MultiplicativeExpression mult = new MultiplicativeExpression();
mult.setLHS( lhs );
mult.setRHS( rhs );
mult.setOperator( String.valueOf( assignOp.charAt( 0 ) ) );
mult.setType( resolveTypeForArithmeticExpression( mult, lhs.getType(), assignOp, rhs.getType() ) );
synthetic = mult;
}
else if( "&=".equals( assignOp ) )
{
BitwiseAndExpression and = new BitwiseAndExpression();
lhs = ensureOperandIntOrLong( lhs );
rhs = ensureOperandIntOrLong( rhs );
rhs = possiblyWrapWithImplicitCoercion( rhs, lhs.getType() );
and.setLHS( lhs );
and.setRHS( rhs );
and.setType( resolveTypeForArithmeticExpression( and, lhs.getType(), assignOp, rhs.getType() ) );
synthetic = and;
}
else if( "&&=".equals( assignOp ) )
{
ConditionalAndExpression and = new ConditionalAndExpression();
verifyComparable( JavaTypes.pBOOLEAN(), rhs, true, true );
rhs = possiblyWrapWithImplicitCoercion( rhs, JavaTypes.pBOOLEAN() );
verifyComparable( JavaTypes.pBOOLEAN(), lhs, true, true );
lhs = possiblyWrapWithImplicitCoercion( lhs, JavaTypes.pBOOLEAN() );
and.setLHS( lhs );
and.setRHS( rhs );
synthetic = and;
}
else if( "^=".equals( assignOp ) )
{
BitwiseXorExpression xor = new BitwiseXorExpression();
lhs = ensureOperandIntOrLong( lhs );
rhs = ensureOperandIntOrLong( rhs );
rhs = possiblyWrapWithImplicitCoercion( rhs, lhs.getType() );
xor.setLHS( lhs );
xor.setRHS( rhs );
xor.setType( resolveTypeForArithmeticExpression( xor, lhs.getType(), assignOp, rhs.getType() ) );
synthetic = xor;
}
else if( "|=".equals( assignOp ) )
{
BitwiseOrExpression or = new BitwiseOrExpression();
lhs = ensureOperandIntOrLong( lhs );
rhs = ensureOperandIntOrLong( rhs );
rhs = possiblyWrapWithImplicitCoercion( rhs, lhs.getType() );
or.setLHS( lhs );
or.setRHS( rhs );
or.setType( resolveTypeForArithmeticExpression( or, lhs.getType(), assignOp, rhs.getType() ) );
synthetic = or;
}
else if( "||=".equals( assignOp ) )
{
ConditionalOrExpression or = new ConditionalOrExpression();
verifyComparable( JavaTypes.pBOOLEAN(), rhs, true, true );
rhs = possiblyWrapWithImplicitCoercion( rhs, JavaTypes.pBOOLEAN() );
verifyComparable( JavaTypes.pBOOLEAN(), lhs, true, true );
lhs = possiblyWrapWithImplicitCoercion( lhs, JavaTypes.pBOOLEAN() );
or.setLHS( lhs );
or.setRHS( rhs );
synthetic = or;
}
else if( "<<=".equals( assignOp ) || ">>=".equals( assignOp ) || ">>>=".equals( assignOp ) )
{
BitshiftExpression shift = new BitshiftExpression();
verifyTypesComparable( rhs, JavaTypes.pINT(), rhs.getType(), false, true );
rhs = possiblyWrapWithImplicitCoercion( rhs, JavaTypes.pINT() );
// Lhs must be an int or a long
IType lhsType = lhs.getType();
if( verify( lhs,
lhsType == JavaTypes.LONG() || lhsType == JavaTypes.pLONG() ||
lhsType == JavaTypes.INTEGER() || lhsType == JavaTypes.pINT() ||
lhsType == JavaTypes.SHORT() || lhsType == JavaTypes.pSHORT() ||
lhsType == JavaTypes.BYTE() || lhsType == JavaTypes.pBYTE(),
Res.MSG_BITSHIFT_LHS_MUST_BE_INT_OR_LONG ) )
{
lhsType = lhsType == JavaTypes.LONG() || lhsType == JavaTypes.pLONG() ? JavaTypes.pLONG() : JavaTypes.pINT();
lhs = possiblyWrapWithImplicitCoercion( lhs, lhsType );
}
shift.setLHS( lhs );
shift.setRHS( rhs );
shift.setOperator( assignOp );
shift.setType( resolveTypeForArithmeticExpression( shift, lhs.getType(), assignOp, rhs.getType() ) );
synthetic = shift;
}
if( synthetic != null )
{
pushExpression( synthetic );
ParseTree rhsLoc = rhs.getLocation();
setLocation( rhsLoc.getOffset(), rhs.getLineNum(), rhsLoc.getColumn(), true );
popExpression();
rhs = synthetic;
}
return rhs;
}
private String matchAssignmentOperator()
{
Token token = getTokenizer().getCurrentToken();
if( token.getType() == SourceCodeTokenizer.TT_OPERATOR )
{
String value = token.getStringValue();
switch( value )
{
case "=":
case "+=":
case "-=":
case "++":
case "
case "*=":
case "%=":
case "/=":
case "&=":
case "&&=":
case "^=":
case "|=":
case "||=":
case "<<=":
getTokenizer().nextToken();
return value;
default:
return matchRightShiftAssign();
}
}
return null;
}
private String matchRightShiftAssign()
{
int mark = getTokenizer().mark();
if( match( null, ">", SourceCodeTokenizer.TT_OPERATOR ) )
{
if( match( null, ">", SourceCodeTokenizer.TT_OPERATOR ) )
{
if( match( null, ">", SourceCodeTokenizer.TT_OPERATOR ) )
{
if( match( null, "=", SourceCodeTokenizer.TT_OPERATOR ) )
{
return ">>>=";
}
}
else if( match( null, "=", SourceCodeTokenizer.TT_OPERATOR ) )
{
return ">>=";
}
}
getTokenizer().restoreToMark( mark );
}
return null;
}
// function-declaration
// [modifiers] function <identifier> ( [ <argument-declaration-list> ] ) : <type-literal>
ISymbol parseFunctionOrPropertyDeclaration( ParsedElement element )
{
ModifierInfo modifiers;
do
{
while( match( null, Keyword.KW_uses ) )
{
parseUsesStatement();
popStatement();
}
modifiers = parseModifiers();
if( match( null, Keyword.KW_function ) )
{
DynamicFunctionSymbol symbol = parseFunctionDecl(element, modifiers);
eatStatementBlock( symbol != null && symbol.getDeclFunctionStmt() != null ? symbol.getDeclFunctionStmt() : element, Res.MSG_EXPECTING_OPEN_BRACE_FOR_FUNCTION_DEF );
return symbol;
}
if( match( null, Keyword.KW_property ) )
{
boolean bGetter = match( null, Keyword.KW_get );
boolean bSetter = !bGetter && match( null, Keyword.KW_set );
SourceCodeTokenizer tokenizer = getTokenizer();
int mark = tokenizer.mark();
boolean bNewPropertySyntax = false;
if( match( null, SourceCodeTokenizer.TT_WORD ) )
{
eatPossibleParametarization( true );
if( match( null, '(' ) )
{
tokenizer.restoreToMark( mark );
}
else
{
bNewPropertySyntax = true;
}
}
if( (bGetter || bSetter) && !bNewPropertySyntax)
{
FunctionStatement fs = new FunctionStatement();
DynamicFunctionSymbol dfs = getOwner().parseFunctionDecl( fs, true, bGetter, modifiers );
if( dfs == null )
{
element.addParseException( new ParseException( makeFullParserState(), Res.MSG_EXPECTING_DECL ) );
return null;
}
fs.setDynamicFunctionSymbol( dfs );
pushStatement( fs );
dfs.setClassMember( true );
eatStatementBlock( fs, Res.MSG_EXPECTING_OPEN_BRACE_FOR_FUNCTION_DEF );
DynamicPropertySymbol dps = getOrCreateDynamicPropertySymbol( element, null, dfs, bGetter );
dps.setClassMember(false);
return dps;
}
}
if( match( null, SourceCodeTokenizer.TT_EOF ) )
{
return null;
}
_tokenizer.nextToken();
} while( true );
}
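  // property-definition
  //   [modifiers] property (get|set) <identifier> ( [ <argument-declaration-list> ] ) [ : <type-literal> ] statement-block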
boolean parsePropertyDefinition()
{
if( isParsingBlock() )
{
return false;
}
ModifierInfo modifiers = parseModifiers();
Token token = getTokenizer().getCurrentToken();
if( Keyword.KW_property == token.getKeyword() )
{
getTokenizer().nextToken();
boolean bGetter = match( null, Keyword.KW_get );
boolean bSetter = !bGetter && match( null, Keyword.KW_set );
token = getTokenizer().getCurrentToken();
int iOffset = token.getTokenStart();
int iLineNum = token.getLine();
int iColumn = token.getTokenColumn();
FunctionStatement functionStmt = parseBaseFunctionDefinition( null, true, bGetter, modifiers );
verify( functionStmt, bGetter || bSetter, Res.MSG_EXPECTING_PROPERTY_GET_OR_SET_MODIFIER );
setLocation( iOffset, iLineNum, iColumn );
getOwner().popStatement();
DynamicFunctionSymbol dfs = functionStmt.getDynamicFunctionSymbol();
if( dfs == null )
{
// Note, dfs is null only if the function stmt is errant
dfs = new DynamicFunctionSymbol( _symTable, "@", new FunctionType( "@", JavaTypes.pVOID(), null, GenericTypeVariable.EMPTY_TYPEVARS ), Collections.<ISymbol>emptyList(), (Statement)null );
functionStmt.setDynamicFunctionSymbol( dfs );
}
IType returnType = functionStmt.getDynamicFunctionSymbol().getReturnType();
verify( functionStmt, bGetter || returnType == JavaTypes.pVOID(), Res.MSG_PROPERTY_SET_MUST_RETURN_VOID );
dfs.setClassMember( false );
if( bGetter && dfs.getArgTypes() != null && dfs.getArgTypes().length > 0 )
{
List<IParseTree> children = functionStmt.getLocation().getChildren();
for( IParseTree child : children )
{
if( child.getParsedElement() instanceof ParameterDeclaration )
{
child.getParsedElement().addParseException(Res.MSG_GETTER_CANNOT_HAVE_PARAMETERS);
}
}
}
DynamicPropertySymbol dps = getOrCreateDynamicPropertySymbol( functionStmt, null, dfs, bGetter );
dps.setClassMember( false );
getOwner().pushStatement( new PropertyStatement( functionStmt, dps ) );
return true;
}
return false;
}
DynamicPropertySymbol getOrCreateDynamicPropertySymbol( ParsedElement parsedElement, IGosuClassInternal gsClass, DynamicFunctionSymbol dfs, boolean bGetter )
{
String strPropertyName = dfs.getDisplayName().substring( 1 );
ISymbol symbol = getSymbolTable().getSymbol( strPropertyName );
DynamicPropertySymbol dps;
if( !verify( parsedElement, symbol == null || symbol instanceof DynamicPropertySymbol, Res.MSG_VARIABLE_ALREADY_DEFINED, strPropertyName ) )
{
return new DynamicPropertySymbol( dfs, bGetter );
}
if( symbol == null ||
(gsClass != null &&
gsClass.getMemberProperty( strPropertyName ) == null &&
gsClass.getStaticProperty( strPropertyName ) == null) )
{
dps = new DynamicPropertySymbol( dfs, bGetter );
if( symbol != null )
{
assert symbol instanceof DynamicPropertySymbol;
dps.setParent( (DynamicPropertySymbol)symbol );
}
return dps;
}
assert symbol instanceof DynamicPropertySymbol;
dps = (DynamicPropertySymbol)symbol;
if( bGetter )
{
verify( parsedElement,
dps.getImmediateGetterDfs() == null ||
dps.getImmediateGetterDfs() instanceof VarPropertyGetFunctionSymbol ||
dps.getImmediateGetterDfs().getValueDirectly() != null ||
dps.getImmediateGetterDfs().isAbstract() ||
(gsClass != null && gsClass.isInterface()),
Res.MSG_GETTER_FOR_PROPERTY_ALREADY_DEFINED,
strPropertyName );
dps.setGetterDfs( dfs );
}
else
{
verify( parsedElement,
dps.getImmediateSetterDfs() == null ||
dps.getImmediateSetterDfs() instanceof VarPropertySetFunctionSymbol ||
dps.getImmediateSetterDfs().getValueDirectly() != null ||
dps.getImmediateSetterDfs().isAbstract() ||
(gsClass != null && gsClass.isInterface()),
Res.MSG_SETTER_FOR_PROPERTY_ALREADY_DEFINED,
strPropertyName );
dps.setSetterDfs( dfs );
}
return dps;
}
// function-definition
// [modifiers] function <identifier> ( [ <argument-declaration-list> ] ) : <type-literal> statement-block
boolean parseFunctionDefinition()
{
ModifierInfo modifiers = parseModifiers();
if( !isParsingBlock() && match( null, Keyword.KW_function ) )
{
parseBaseFunctionDefinition( new FunctionStatement(), false, false, modifiers );
return true;
}
return false;
}
FunctionStatement parseBaseFunctionDefinition(FunctionStatement functionStmt, boolean bProperty, boolean bGetter, ModifierInfo modifiers)
{
boolean bNullFunctionStmt = functionStmt == null;
functionStmt = bNullFunctionStmt ? new FunctionStatement() : functionStmt;
final Token token = getTokenizer().getCurrentToken();
int iOffsetName = token.getTokenStart();
int iLineNumName = token.getLine();
int iColumnName = token.getTokenColumn();
String strFunctionName = token.getStringValue();
boolean bHasName;
if( bHasName = verify( functionStmt, isWordOrValueKeyword( token ), Res.MSG_EXPECTING_NAME_FUNCTION_DEF ) )
{
getTokenizer().nextToken();
functionStmt.setNameOffset( iOffsetName, strFunctionName );
}
addNameInDeclaration( strFunctionName, iOffsetName, iLineNumName, iColumnName, bHasName );
maybeEatNonDeclKeyword( bHasName, strFunctionName );
strFunctionName = strFunctionName == null
? ""
: bProperty
? "@" + strFunctionName
: strFunctionName;
HashMap<String, ITypeVariableDefinition> origTypeVarMap = new HashMap<>( getTypeVariables() );
DynamicFunctionSymbol dfsDecl = findCorrespondingDeclDfs( iOffsetName, modifiers.getModifiers() );
List<TypeVariableDefinitionImpl> defsFromDecl = dfsDecl == null ? Collections.emptyList() : getTypeVarDefsFromDecl( dfsDecl.getType().getGenericTypeVariables() );
List<ITypeVariableDefinitionExpression> typeVarDefs = parseTypeVariableDefs( functionStmt, true, defsFromDecl );
// Must create function type and assign it as the type var's enclosing
// type *before* we parse the return type (in case it refs function's type vars)
IGenericTypeVariable[] typeVars = TypeVariableDefinition.getTypeVars( typeVarDefs );
if( bProperty && !typeVarDefs.isEmpty() )
{
verify( functionStmt, false, Res.MSG_GENERIC_PROPERTIES_NOT_SUPPORTED );
}
Expression annotationDefault = null;
try
{
ICompilableTypeInternal gsClass = getGosuClass();
boolean bAnnotation = gsClass instanceof IGosuClass && ((IGosuClass)gsClass).isAnnotation() && JavaTypes.ANNOTATION().isAssignableFrom( gsClass );
verify( functionStmt, match( null, '(' ), Res.MSG_EXPECTING_LEFTPAREN_FUNCTION_DEF );
ArrayList<ISymbol> args = null;
Statement statement = null;
boolean functionBodyParsed = true;
      // Pushing an isolated scope so that the compile-time stack indexes are
// aligned with runtime e.g., args must be indexed relative to the
// activation record.
// We push a scope to parse the function decl, then we pop it to find the dfs, then we push it again to parse the body.
final IScriptPartId scriptPart = getScriptPart();
_symTable.pushIsolatedScope( new GosuParserTransparentActivationContext( scriptPart ) );
try
{
IType[] argTypes = null;
if( !match( null, null, ')', true ) )
{
args = parseParameterDeclarationList( functionStmt, Modifier.isStatic( modifiers.getModifiers() ), null, bProperty, bGetter, false, false );
argTypes = new IType[args.size()];
for( int i = 0; i < args.size(); i++ )
{
_symTable.putSymbol( args.get( i ) );
argTypes[i] = args.get( i ).getType();
}
}
else
{
parseParameterDeclarationList( functionStmt, Modifier.isStatic( modifiers.getModifiers() ), null, bProperty, bGetter, true, false );
verify ( functionStmt, ! bProperty || bGetter, Res.MSG_PROPERTY_SET_MUST_HAVE_ONE_PARAMETER );
}
verify( functionStmt, match( null, ')' ), Res.MSG_EXPECTING_RIGHTPAREN_FUNCTION_DEF );
TypeLiteral typeLiteral;
if( match( null, ":", SourceCodeTokenizer.TT_OPERATOR ) )
{
parseTypeLiteral();
typeLiteral = (TypeLiteral)popExpression();
if( bGetter )
{
IType returnType = typeLiteral.getType().getType();
verify( typeLiteral, returnType != JavaTypes.pVOID(), Res.MSG_VOID_NOT_ALLOWED );
}
if( bAnnotation )
{
IType returnType = typeLiteral.getType().getType();
verify( typeLiteral, isValidAnnotationMethodReturnType( returnType ), Res.MSG_INVALID_TYPE_FOR_ANNOTATION_MEMBER );
}
}
else
{
verify( functionStmt, !bAnnotation, Res.MSG_EXPECTING_RETURN_TYPE_OR_FUN_BODY );
String[] fabricatedT = {Keyword.KW_void.toString()};
typeLiteral = resolveTypeLiteral( fabricatedT );
verify( functionStmt, !bGetter, Res.MSG_MISSING_PROPERTY_RETURN );
}
if( bAnnotation &&
match( null, "=", SourceCodeTokenizer.TT_OPERATOR ) )
{
IType ctxType = typeLiteral.getType().getType();
if( JavaTypes.ANNOTATION().isAssignableFrom( ctxType ) )
{
List<IGosuAnnotation> anno = new ArrayList<>( 1 );
getOwner().parseAnnotation( anno );
annotationDefault = (Expression)anno.get( 0 ).getExpression();
}
else
{
parseExpression( new ContextType( ctxType, false, true ) );
annotationDefault = popExpression();
}
if( !annotationDefault.hasParseExceptions() )
{
verify( annotationDefault,
annotationDefault.isCompileTimeConstant() && !(annotationDefault instanceof NullExpression), // for some unknown reason the JVM does not allow null default values
Res.MSG_COMPILE_TIME_CONSTANT_REQUIRED );
}
}
        // Note dfsDecl *should have* been assigned prior to parsing args etc. However, in some cases,
        // e.g., crappy pcf types, the symbol can only be resolved from the _symTable. Sigh.
if( !bProperty && dfsDecl == null )
{
String functionNameWithArgs = DynamicFunctionSymbol.getSignatureName( strFunctionName == null ? "" : strFunctionName, args );
DynamicFunctionSymbol dfsInSymTable = (DynamicFunctionSymbol)_symTable.getSymbol( functionNameWithArgs );
if( dfsInSymTable == null )
{
dfsDecl = dfsInSymTable;
}
else if( dfsInSymTable.getGosuClass() == gsClass )
{
dfsDecl = dfsInSymTable;
dfsDecl = assignPossibleDuplicateDfs( dfsDecl, _symTable.getSymbols().values() );
}
}
FunctionType type = dfsDecl == null
? new FunctionType( strFunctionName, JavaTypes.pVOID(), null, typeVars )
: (FunctionType)dfsDecl.getType();
if( !bProperty && dfsDecl != null )
{
type = (FunctionType)dfsDecl.getType();
if( typeVarDefs != null )
{
for( ITypeVariableDefinitionExpression tvd : typeVarDefs )
{
((ITypeVariableDefinition)tvd).setEnclosingType( type );
}
}
}
else
{
type.setRetType( typeLiteral.getType().getType() );
type.setArgumentTypes( argTypes );
}
type.setScriptPart( scriptPart );
type.setModifiers( modifiers.getModifiers() );
putThisAndSuperSymbols( modifiers );
if( !Modifier.isAbstract( modifiers.getModifiers() ) && !Modifier.isNative( modifiers.getModifiers() ) )
{
if( scriptPart == null ||
!(scriptPart.getContainingType() instanceof IGosuClassInternal) ||
!scriptPart.getContainingType().isInterface() ||
match( null, null, '{', true ) ) // default iface method
{
if( parseFunctionBody( functionStmt, type ) )
{
statement = popStatement();
}
else
{
functionBodyParsed = false;
}
}
}
}
finally
{
_symTable.popScope();
}
ISymbol functionSymbol = _symTable.getSymbol( strFunctionName );
if( bProperty )
{
if( functionSymbol instanceof DynamicPropertySymbol )
{
DynamicPropertySymbol dps = (DynamicPropertySymbol)functionSymbol;
String functionNameWithArgs = DynamicFunctionSymbol.getSignatureName( strFunctionName, args );
dfsDecl = (dps == null || !dps.getName().equals( strFunctionName ) )
? (DynamicFunctionSymbol)_symTable.getSymbol( functionNameWithArgs )
: dps.getFunction( functionNameWithArgs );
}
else if( gsClass == null )
{
dfsDecl = findProgramPropertyDfs( strFunctionName, args );
}
}
if( dfsDecl != null &&
dfsDecl.getDeclFunctionStmt() != null &&
bNullFunctionStmt )
{
FunctionStatement funcStmtFromDecl = dfsDecl.getDeclFunctionStmt();
if( funcStmtFromDecl.getParent() != null &&
funcStmtFromDecl.getGosuClass() == gsClass )
{
functionStmt = dfsDecl.getDeclFunctionStmt();
}
}
verify( functionStmt, functionBodyParsed, Res.MSG_EXPECTING_RETURN_TYPE_OR_FUN_BODY );
verify( functionStmt, (gsClass != null && gsClass.isAbstract()) || !Modifier.isAbstract( modifiers.getModifiers() ), Res.MSG_ABSTRACT_MEMBER_NOT_IN_ABSTRACT_CLASS );
verify( functionStmt, dfsDecl != null, Res.MSG_EXPECTING_NAME_FUNCTION_DEF );
verify( functionStmt, parsingFunctionsEncloseMyClass(), Res.MSG_EXPECTING_CLOSE_BRACE_FOR_FUNCTION_DEF );
if( dfsDecl != null )
{
if( args != null )
{
//replace the decl time arg symbols with the impl time arg symbols
List<ISymbol> argList = dfsDecl.getArgs();
if( argList.isEmpty() )
{
argList = new ArrayList<>();
dfsDecl.setArgs( argList );
}
else
{
argList.clear();
}
argList.addAll( args );
}
        // Overwrite annotations to use the new-expressions created in the definition phase
dfsDecl.getModifierInfo().setAnnotations( modifiers.getAnnotations() );
if( dfsDecl instanceof EnhancementDynamicFunctionSymbol )
{
transferModifierInfo( functionStmt, modifiers, AnnotationUseSiteTarget.receiver, (EnhancementDynamicFunctionSymbol)dfsDecl );
}
dfsDecl.setAnnotationDefault( annotationDefault );
dfsDecl.setValueDirectly( statement );
pushDynamicFunctionSymbol( dfsDecl );
functionStmt.setDynamicFunctionSymbol( dfsDecl );
Statement lastStatement = getLastStatement( statement );
if( lastStatement != null && !lastStatement.hasParseExceptions() )
{
verifyOrWarn( functionStmt, isTerminal( lastStatement, dfsDecl.getReturnType() ),
!CommonServices.getEntityAccess().isUnreachableCodeDetectionOn(),
Res.MSG_MISSING_RETURN );
}
}
pushStatement( functionStmt );
DynamicFunctionSymbol dynamicFunctionSymbol = functionStmt.getDynamicFunctionSymbol();
if(dynamicFunctionSymbol != null && dynamicFunctionSymbol.getDeclFunctionStmt() == null )
{
dynamicFunctionSymbol.setDeclFunctionStmt(functionStmt);
}
return functionStmt;
}
finally
{
setTypeVariables( origTypeVarMap );
}
}
private Statement getLastStatement( Statement statement )
{
if( statement instanceof StatementList )
{
Statement[] statements = ((StatementList)statement).getStatements();
if( statements != null && statements.length > 0 )
{
return statements[statements.length-1];
}
}
return statement;
}
private boolean isValidAnnotationMethodReturnType( IType returnType )
{
return (returnType.isPrimitive() && returnType != JavaTypes.pVOID()) || returnType == JavaTypes.STRING() ||
returnType.getGenericType() == JavaTypes.CLASS() || JavaTypes.ANNOTATION().isAssignableFrom( returnType ) || returnType.isEnum() ||
(returnType.isArray() && isValidAnnotationMethodReturnType( returnType.getComponentType()));
}
private void putThisAndSuperSymbols( ModifierInfo modifiers )
{
if( !Modifier.isStatic( modifiers.getModifiers() ) &&
getScriptPart() != null &&
(getScriptPart().getContainingType() instanceof IGosuClassInternal) )
{
IGosuClassInternal gsClass = (IGosuClassInternal)getScriptPart().getContainingType();
IType thisType = gsClass;
if( gsClass instanceof IGosuEnhancementInternal )
{
thisType = ((IGosuEnhancementInternal)gsClass).getEnhancedType();
}
if( thisType != null )
{
thisType = TypeLord.getConcreteType( thisType );
getSymbolTable().putSymbol( new ThisSymbol( thisType, _symTable ) );
if( !(gsClass instanceof IGosuEnhancementInternal) )
{
IGosuClassInternal superClass = gsClass.getSuperClass();
if( superClass == null )
{
superClass = IGosuClassInternal.Util.getGosuClassFrom( JavaTypes.OBJECT() );
}
getSymbolTable().putSymbol( new Symbol( Keyword.KW_super.getName(), superClass, _symTable, null ) );
}
}
}
}
private DynamicFunctionSymbol findProgramPropertyDfs( String strFunctionName, ArrayList<ISymbol> args )
{
    List<IFunctionSymbol> list = getDfsDeclsForFunction( strFunctionName );
String propertyNameWithArgs = DynamicFunctionSymbol.getSignatureName( strFunctionName == null ? "" : strFunctionName, args );
for( IFunctionSymbol func : list )
{
if( func instanceof DynamicFunctionSymbol )
{
DynamicFunctionSymbol dfs = (DynamicFunctionSymbol)func;
String sig = DynamicFunctionSymbol.getSignatureName( dfs.getDisplayName(), dfs.getArgs() );
if( propertyNameWithArgs.equals( sig ) )
{
return dfs;
}
}
}
return null;
}
private List<TypeVariableDefinitionImpl> getTypeVarDefsFromDecl( IGenericTypeVariable[] typeVars )
{
if( typeVars == null || typeVars.length == 0 )
{
return Collections.emptyList();
}
List<TypeVariableDefinitionImpl> result = new ArrayList<>( typeVars.length );
for( IGenericTypeVariable typeVar : typeVars )
{
result.add( (TypeVariableDefinitionImpl)typeVar.getTypeVariableDefinition() );
}
return result;
}
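  // Finds the declaration-phase DynamicFunctionSymbol whose decl function statement
  // starts at the given name offset, searching static or member functions per the modifiers.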
private DynamicFunctionSymbol findCorrespondingDeclDfs( int iOffsetName, int iModifiers )
{
ICompilableTypeInternal gsClass = getGosuClass();
if( gsClass == null )
{
return null;
}
Collection<DynamicFunctionSymbol> functions = Modifier.isStatic( iModifiers )
? gsClass.getParseInfo().getStaticFunctions()
: gsClass.getParseInfo().getMemberFunctions().values();
for( DynamicFunctionSymbol dfs : functions )
{
FunctionStatement funcStmt = dfs.getDeclFunctionStmt();
if( funcStmt != null && funcStmt.getNameOffset( null ) == iOffsetName )
{
return dfs;
}
}
return null;
}
boolean isDeclarationKeyword( String strKeyword )
{
return strKeyword != null &&
(Keyword.KW_function.equals( strKeyword ) ||
Keyword.KW_construct.equals( strKeyword ) ||
Keyword.KW_property.equals( strKeyword ) ||
//Keyword.KW_var.equals( strFunctionName ) ||
Keyword.KW_delegate.equals( strKeyword ) ||
Keyword.KW_class.equals( strKeyword ) ||
Keyword.KW_interface.equals( strKeyword ) ||
Keyword.KW_annotation.equals( strKeyword ) ||
Keyword.KW_structure.equals( strKeyword ) ||
Keyword.KW_enum.equals( strKeyword ));
}
static DynamicFunctionSymbol assignPossibleDuplicateDfs( DynamicFunctionSymbol dfsDecl, Iterable symbols )
{
DynamicFunctionSymbol result = dfsDecl;
if( dfsDecl != null && dfsDecl.getValueDirectly() != null )
{
int iMin = Integer.MAX_VALUE;
for( Object csr : symbols )
{
if( csr instanceof DynamicFunctionSymbol )
{
DynamicFunctionSymbol dfsCsr = (DynamicFunctionSymbol)csr;
if( dfsCsr.getName().toLowerCase().contains( "_duplicate_" + dfsDecl.getName().toLowerCase() ) && dfsCsr.getGosuClass() == dfsDecl.getGosuClass() )
{
String strName = dfsCsr.getName();
if( dfsCsr.getValueDirectly() == null )
{
int iIndex = Integer.parseInt( strName.substring( 0, strName.indexOf( '_' ) ) );
if( iIndex < iMin )
{
iMin = iIndex;
result = (DynamicFunctionSymbol)csr;
}
}
}
}
}
}
return result;
}
void addNameInDeclaration( String strName, int iOffsetName, int iLineNumName, int iColumnName, boolean bHasName )
{
NameInDeclaration name = new NameInDeclaration( strName );
pushExpression( name );
setLocation( bHasName ? iOffsetName : getTokenizer().getPriorToken().getTokenEnd(), iLineNumName, iColumnName, !bHasName, true );
popExpression();
}
private boolean parsingFunctionsEncloseMyClass()
{
if( _parsingFunctions.isEmpty() )
{
return true;
}
for( FunctionType ft : _parsingFunctions )
{
if( ft.getScriptPart().getContainingType() == getGosuClass() )
{
return false;
}
}
return true;
}
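  // Parses the body of a program's synthetic entry point, i.e. the evaluate( symbols )
  // function, appending a default return statement when the body is not already terminal.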
FunctionStatement parseProgramEntryPointBody()
{
DynamicFunctionSymbol dfsDecl = getProgramEntryPointDfs();
// if( _tokenizer.getInstructor() == null )
// parseClasspathStatements( true );
_symTable.pushIsolatedScope( new GosuParserTransparentActivationContext( getScriptPart() ) );
boolean bFunctionBodyParsed = true;
StatementList stmtList = null;
_bProgramCallFunction = true;
try
{
if( dfsDecl != null && parseProgramFunctionBody( (FunctionType)dfsDecl.getType() ) )
{
stmtList = (StatementList)popStatement();
}
else
{
bFunctionBodyParsed = false;
}
}
finally
{
_bProgramCallFunction = false;
_symTable.popScope();
}
FunctionStatement fs = new FunctionStatement();
fs.setDynamicFunctionSymbol( dfsDecl );
verify( fs, bFunctionBodyParsed, Res.MSG_EXPECTING_RETURN_TYPE_OR_FUN_BODY );
if (dfsDecl != null) {
dfsDecl.setValueDirectly( stmtList );
}
pushDynamicFunctionSymbol( dfsDecl );
fs.setDynamicFunctionSymbol( dfsDecl );
// if( bFunctionBodyParsed )
addDefaultReturnStmt( dfsDecl, stmtList );
pushStatement( fs );
return fs;
}
private DynamicFunctionSymbol getProgramEntryPointDfs()
{
String functionNameWithArgs = DynamicFunctionSymbol.getSignatureName( "evaluate", Collections.<ISymbol>singletonList( new Symbol( "symbols", JavaTypes.IEXTERNAL_SYMBOL_MAP(), null ) ) );
return (DynamicFunctionSymbol)_symTable.getSymbol( functionNameWithArgs );
}
private StatementList handleExpressionStatementList( Expression expr )
{
Statement stmt;
if( expr.hasParseExceptions() || expr.getType() != JavaTypes.pVOID() || expr instanceof NullExpression )
{
stmt = wrapProgramExpressionInReturnStmt( expr );
}
else if( expr instanceof MethodCallExpression )
{
stmt = new MethodCallStatement();
((MethodCallStatement)stmt).setMethodCall( (MethodCallExpression)expr );
}
else if( expr instanceof BeanMethodCallExpression )
{
stmt = new BeanMethodCallStatement();
((BeanMethodCallStatement)stmt).setBeanMethodCall( (BeanMethodCallExpression)expr );
}
else if( expr instanceof BlockInvocation )
{
stmt = new BlockInvocationStatement((BlockInvocation)expr);
}
else
{
throw new UnsupportedOperationException( "Did not expect expression of type: " + expr.getClass().getName() );
}
pushStatement( stmt );
ParseTree exprLoc = expr.getLocation();
setLocation( exprLoc.getOffset(), exprLoc.getLineNum(), exprLoc.getColumn(), true );
popStatement();
StatementList stmtList = new StatementList( _symTable );
List<Statement> stmts = Collections.singletonList( stmt );
stmtList.setStatements( stmts );
pushStatement( stmtList );
setLocation( exprLoc.getOffset(), exprLoc.getLineNum(), exprLoc.getColumn(), true );
return stmtList;
}
private ReturnStatement wrapProgramExpressionInReturnStmt( Expression e )
{
ReturnStatement retStmt = new ReturnStatement();
retStmt.setSynthetic( true );
e = possiblyWrapWithImplicitCoercion( e, JavaTypes.OBJECT() );
if( e.getType() == JavaTypes.pVOID() )
{
e.setType( JavaTypes.OBJECT() );
}
retStmt.setValue( e );
return retStmt;
}
private void addDefaultReturnStmt( DynamicFunctionSymbol dfsDecl, StatementList stmtList )
{
if( dfsDecl != null && !isTerminal( stmtList, dfsDecl.getReturnType() ) )
{
ReturnStatement defaultReturnStmt = new ReturnStatement();
ImplicitTypeAsExpression ta = new ImplicitTypeAsExpression();
ta.setLHS( new NullExpression() );
ta.setType( JavaTypes.OBJECT() );
ta.setCoercer( IdentityCoercer.instance() );
defaultReturnStmt.setValue( ta );
List<Statement> stmts;
if( stmtList.getStatements() == null )
{
//## todo: Probably short-circuit the condition when a program is an empty expression i.e., don't generate a class for it
stmts = new ArrayList<>( 2 );
}
else
{
stmts = new ArrayList<>( Arrays.asList( stmtList.getStatements() ) );
}
stmts.add( defaultReturnStmt );
stmtList.setStatements( stmts );
}
}
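  // A program is first parsed as a single expression; if that fails it is reparsed as a
  // statement (list). If neither parse succeeds and the source did not look like a program,
  // the expression parse is repeated so its errors and locations are the ones reported.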
private boolean parseProgramFunctionBody( FunctionType type )
{
maybeSetExternalSymbols();
pushParsingFunction( type );
try
{
setDontOptimizeStatementLists( true );
int state = _tokenizer.mark();
int iLocationsCount = _locations.size();
try
{
parseProgramExpr();
Expression expr = popExpression();
verify( expr, match( null, SourceCodeTokenizer.TT_EOF ), Res.MSG_END_OF_EXPRESSION );
((IGosuProgramInternal)getGosuClass()).setExpression( expr );
verifyParsedElement( expr, false );
handleExpressionStatementList( expr );
}
catch( ParseResultsException exprErr )
{
boolean bProbablyProgram = !getTokenizer().isEOF();
_tokenizer.restoreToMark( state );
_stack.clear();
removeLocationsFrom( iLocationsCount );
try
{
if( !parseStatement( true ) )
{
// Couldn't parse a function body at all
return false;
}
verifyParsedElement( peekStatement(), false );
((IGosuProgramInternal)getGosuClass()).setStatement( peekStatement() );
}
catch( ParseResultsException stmtErr )
{
if( !bProbablyProgram )
{
// Note we can't just rethrow the original exception because we need
// the locations etc. in the parser, so we have to reparse and let it throw.
_tokenizer.restoreToMark( state );
_stack.clear();
removeLocationsFrom( iLocationsCount );
parseProgramExpr();
Expression expr = popExpression();
verify( expr, match( null, SourceCodeTokenizer.TT_EOF ), Res.MSG_END_OF_EXPRESSION );
final IGosuValidator validator = getValidator();
if( validator != null )
{
validator.validate( expr, getScript() );
}
handleExpressionStatementList( expr );
}
// else
// int iProgSourceLen = _tokenizer.getReader().getLength();
// int iProgStmtExtent = peekStatement().getLocation().getExtent() + 1;
// if( iProgStmtExtent < iProgSourceLen )
// ParseTree loc = peekStatement().getLocation();
// loc.setLength( iProgSourceLen - loc.getOffset() );
}
}
Statement body = peekStatement();
if( body instanceof StatementList )
{
((StatementList)body).setNoScope();
}
return true;
}
finally
{
popParsingFunction();
}
}
private void parseProgramExpr()
{
IType expectedReturnType = ((IGosuProgramInternal)getGosuClass()).getExpectedReturnType();
if( expectedReturnType != null )
{
parseExpression( new ContextType( expectedReturnType ), true );
}
else
{
parseExpression();
}
}
private void maybeSetExternalSymbols()
{
if( getGosuClass() instanceof IGosuProgram )
{
ISourceFileHandle sfh = getGosuClass().getSourceFileHandle();
if( sfh instanceof StringSourceFileHandle )
{
ISymbolTable extSyms = ((StringSourceFileHandle)sfh).getExternalSymbols();
if( extSyms != null )
{
// If extSyms is non-null, it usually means this program is for context-sensitive evaluation e.g., in a debugger
HashMap<String, ISymbol> map = new HashMap<>();
//noinspection unchecked
for( Symbol s: (Collection<Symbol>)extSyms.getSymbols().values() )
{
if( s.isLocal() )
{
map.put( s.getName(), s );
}
}
ExternalSymbolMapForMap extMap = new ExternalSymbolMapForMap( map );
((GosuProgramParseInfo)getGosuClass().getParseInfo()).setExternalSymbols( extMap );
}
}
}
}
void removeLocationsFrom( int iLocationsCount )
{
for( int i = _locations.size(); i > iLocationsCount; i-- )
{
_locations.remove( i-1 );
}
}
private boolean isTerminal( Statement statement, IType returnType )
{
boolean[] bAbsolute = {false};
return returnType == JavaTypes.pVOID()
|| statement == null
|| statement.getLeastSignificantTerminalStatement( bAbsolute ) != null && bAbsolute[0];
}
private boolean parseFunctionBody( FunctionStatement functionStmt, FunctionType type )
{
pushParsingFunction( type );
try
{
if( (!(getGosuClass() instanceof IGosuProgram) || !((IGosuProgramInternal)getGosuClass()).isParsingExecutableProgramStatements()) &&
!match( null, null, '{', true ) )
{
Token T = getTokenizer().getCurrentToken();
eatStatementBlock( functionStmt, Res.MSG_EXPECTING_OPEN_BRACE_FOR_FUNCTION_DEF );
NotAStatement nas = new NotAStatement();
pushStatement( nas );
setLocation( T.getTokenStart(), T.getLine(), T.getTokenColumn() );
}
else if( !parseStatement( true, false ) )
{
return false;
}
Statement body = peekStatement();
if( body instanceof StatementList )
{
((StatementList)body).setNoScope();
}
return true;
}
finally
{
popParsingFunction();
}
}
DynamicFunctionSymbol parseFunctionDecl( ParsedElement element, ModifierInfo modifiers )
{
return parseFunctionDecl( element, false, false, modifiers );
}
DynamicFunctionSymbol parseFunctionDecl(ParsedElement element, boolean bProperty, boolean bGetter, ModifierInfo modifiers)
{
return parseFunctionDecl( element, null, bProperty, bGetter, modifiers );
}
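// Parses a function or property-accessor declaration header: name, generic type variables
// (collected in two passes so bounds may forward-reference each other), parameter list,
// return type, and an optional annotation default value. Builds and registers the resulting
// DynamicFunctionSymbol; the function body is not parsed here.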
DynamicFunctionSymbol parseFunctionDecl( ParsedElement element, String T, boolean bProperty, boolean bGetter, ModifierInfo modifiers )
{
_symTable.pushIsolatedScope( new FunctionDeclTransparentActivationContext( getScriptPart() ) );
try
{
boolean bHasName = true;
int iTokenStart;
Token token;
if( T == null )
{
int mark = getTokenizer().mark();
bHasName = verify( element, match( null, SourceCodeTokenizer.TT_WORD ), Res.MSG_EXPECTING_NAME_FUNCTION_DEF );
token = getTokenizer().getTokenAt( mark );
iTokenStart = token == null ? 0 : token.getTokenStart();
if( bHasName )
{
T = token.getStringValue();
}
}
else
{
// This must be the 'construct' token start position
token = getTokenizer().getPriorToken( true );
iTokenStart = token.getTokenStart();
}
if( element instanceof IParsedElementWithAtLeastOneDeclaration )
{
((IParsedElementWithAtLeastOneDeclaration)element).setNameOffset( iTokenStart, T );
}
String strFunctionName = T;
// if( strFunctionName == null )
// return null;
maybeEatNonDeclKeyword( bHasName, strFunctionName );
strFunctionName = strFunctionName == null ? "" : strFunctionName;
warn( element, !Keyword.isReservedKeyword( strFunctionName ), Res.MSG_IMPROPER_USE_OF_KEYWORD, strFunctionName );
ICompilableType gsClass = getGosuClass();
if( gsClass != null && strFunctionName.equals( gsClass.getRelativeName() ) && gsClass.isEnum() )
{
verify( element, Modifier.isPrivate( modifiers.getModifiers() ), Res.MSG_ENUM_CONSTRUCTOR_MUST_BE_PRIVATE );
}
if( token != null && element instanceof IParsedElementWithAtLeastOneDeclaration )
{
addNameInDeclaration( strFunctionName, token.getTokenStart(), token.getLine(), token.getTokenColumn(), bHasName );
}
HashMap<String, ITypeVariableDefinition> origTypeVarMap = new HashMap<>( getTypeVariables() );
// Parse generic type vars
int mark = _tokenizer.mark();
int iLocationsCount = _locations.size();
// first pass, just collect the vars, skipping bounds (this is to support forward reference of type vars within recursive type vars)
List<ITypeVariableDefinitionExpression> typeVarDefs = parseTypeVariableDefs( element, true, Collections.emptyList() );
IGenericTypeVariable[] typeVars = TypeVariableDefinition.getTypeVars( typeVarDefs );
// backtrack
_tokenizer.restoreToMark( mark );
removeLocationsFrom( iLocationsCount );
// second pass, including bounds
typeVarDefs = parseTypeVariableDefs( element, true, getTypeVarDefsFromDecl( typeVars ) );
// Must create function type and assign it as the type var's enclosing
// type *before* we parse the return type (in case it refs function's type vars)
typeVars = TypeVariableDefinition.getTypeVars( typeVarDefs );
strFunctionName = !bProperty
? strFunctionName
: ('@' + strFunctionName);
FunctionType type = new FunctionType( strFunctionName, JavaTypes.pVOID(), null, typeVars );
type.setEnclosingType( getGosuClass() );
if( bProperty && !typeVarDefs.isEmpty() )
{
verify( element, false, Res.MSG_GENERIC_PROPERTIES_NOT_SUPPORTED );
}
try
{
boolean bAnnotation = gsClass instanceof IGosuClass && ((IGosuClass)gsClass).isAnnotation() && JavaTypes.ANNOTATION().isAssignableFrom( gsClass );
verify( element, match( null, '(' ), Res.MSG_EXPECTING_LEFTPAREN_FUNCTION_DEF );
ArrayList<ISymbol> params = null;
IType[] paramTypes = null;
if( !match( null, ')' ) )
{
params = parseParameterDeclarationList( element, Modifier.isStatic( modifiers.getModifiers() ), null, bProperty, bGetter, false, false );
paramTypes = new IType[params.size()];
for( int i = 0; i < params.size(); i++ )
{
_symTable.putSymbol( params.get( i ) );
paramTypes[i] = params.get( i ).getType();
}
verify( element, match( null, ')' ), Res.MSG_EXPECTING_RIGHTPAREN_FUNCTION_DEF );
}
else
{
verify ( element, ! bProperty || bGetter, Res.MSG_PROPERTY_SET_MUST_HAVE_ONE_PARAMETER );
}
TypeLiteral typeLiteral;
if( match( null, ":", SourceCodeTokenizer.TT_OPERATOR ) )
{
parseTypeLiteral();
typeLiteral = (TypeLiteral)popExpression();
if( bGetter )
{
IType returnType = typeLiteral.getType().getType();
verify( typeLiteral, returnType != JavaTypes.pVOID(), Res.MSG_VOID_NOT_ALLOWED );
}
}
else
{
verify( element, !bAnnotation, Res.MSG_EXPECTING_RETURN_TYPE_OR_FUN_BODY );
String[] fakeT = {Keyword.KW_void.toString()};
typeLiteral = resolveTypeLiteral( fakeT );
verify( element, !bGetter, Res.MSG_MISSING_PROPERTY_RETURN );
}
Expression annotationDefault = null;
if( gsClass instanceof IGosuClass && ((IGosuClass)gsClass).isAnnotation() && JavaTypes.ANNOTATION().isAssignableFrom( gsClass ) &&
match( null, "=", SourceCodeTokenizer.TT_OPERATOR ) )
{
IType ctxType = typeLiteral.getType().getType();
if( JavaTypes.ANNOTATION().isAssignableFrom( ctxType ) )
{
List<IGosuAnnotation> anno = new ArrayList<>( 1 );
getOwner().parseAnnotation( anno );
annotationDefault = (Expression)anno.get( 0 ).getExpression();
}
else
{
parseExpression( new ContextType( ctxType, false, true ) );
annotationDefault = popExpression();
}
if( !annotationDefault.hasParseExceptions() )
{
verify( annotationDefault,
annotationDefault.isCompileTimeConstant() && !(annotationDefault instanceof NullExpression), // for some unknown reason the JVM does not allow null default values
Res.MSG_COMPILE_TIME_CONSTANT_REQUIRED );
}
}
type.setArgumentTypes( paramTypes );
type.setRetType( typeLiteral.getType().getType() );
type.setScriptPart( getScriptPart() );
type.setModifiers( modifiers.getModifiers() );
DynamicFunctionSymbol dfs;
if( gsClass instanceof IGosuEnhancement && !Modifier.isStatic( modifiers.getModifiers() ) )
{
dfs = new EnhancementDynamicFunctionSymbol( _symTable, strFunctionName, type, params, ((IGosuEnhancement)gsClass).getEnhancedType() );
transferModifierInfo( element, modifiers, AnnotationUseSiteTarget.receiver, (EnhancementDynamicFunctionSymbol)dfs );
}
else
{
dfs = new DynamicFunctionSymbol( _symTable, strFunctionName, type, params, (Statement)null );
}
dfs.setScriptPart( getScriptPart() );
if( gsClass != null && gsClass.isInterface() && !match( null, null, '{', true ) )
{
modifiers.addModifiers( Modifier.ABSTRACT );
dfs.setAbstract( true );
}
dfs.setModifierInfo( modifiers );
dfs.setAnnotationDefault( annotationDefault );
if( element instanceof FunctionStatement )
{
dfs.setDeclFunctionStmt( (FunctionStatement)element );
}
verifyFunction( dfs, element );
int iDupIndex = nextIndexOfErrantDuplicateDynamicSymbol( dfs, _dfsDeclByName.get( dfs.getDisplayName() ), true );
if( iDupIndex >= 0 )
{
dfs.renameAsErrantDuplicate( iDupIndex );
}
putDfsDeclInSetByName( dfs );
return dfs;
}
finally
{
setTypeVariables( origTypeVarMap );
}
}
finally
{
_symTable.popScope();
}
}
boolean maybeEatNonDeclKeyword( boolean bHasName, String strFunctionName )
{
return !bHasName && strFunctionName != null && strFunctionName.length() > 0 &&
!isDeclarationKeyword( strFunctionName ) &&
match( null, SourceCodeTokenizer.TT_KEYWORD );
}
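// Verifies a newly declared function against previously declared functions of the same name:
// duplicate signatures, illegal overrides (final, static mismatches, weaker access, mismatched
// type variable counts), missing 'override' modifiers, enhancement masking, and overloads
// that clash after erasure or via optional parameters.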
private void verifyFunction( DynamicFunctionSymbol dfs, ParsedElement element )
{
boolean bValidOverrideFound = false;
List<IFunctionSymbol> functions = getDfsDeclsForFunction( dfs.getDisplayName() );
functions = maybeAddPrivateFunctionsIfSuperInSamePackage( dfs.getDisplayName(), functions );
for( IFunctionSymbol existing : functions )
{
if( existing instanceof DynamicFunctionSymbol )
{
DynamicFunctionSymbol dfsExisting = (DynamicFunctionSymbol)existing;
if( areDFSsInSameNameSpace( dfs, dfsExisting ))
{
// if the parameters match exactly-ish
if( areParametersEquivalent( dfs, dfsExisting ) ||
areParametersEquivalent_Enhancement( dfs, dfsExisting ) ||
areParametersEquivalent_Enhancement( dfsExisting, dfs ) )
// || propertySettersAreCovariant( dfs, dfsExisting ) )
{
IGosuClass owningTypeForDfs = getOwningTypeForDfs( dfsExisting );
ICompilableTypeInternal gsClass = getGosuClass();
if( owningTypeForDfs instanceof IGosuEnhancement )
{
if( dfs.isOverride() || owningTypeForDfs == gsClass )
{
addError( element, Res.MSG_CANNOT_OVERRIDE_FUNCTION_FROM_ENHANCEMENT );
}
else
{
warn( element, false, Res.MSG_MASKING_ENHANCEMENT_METHODS_MAY_BE_CONFUSING );
}
}
else
{
boolean bSameButNotInSameClass = !GosuObjectUtil.equals( dfsExisting.getScriptPart(), dfs.getScriptPart() );
if( !verify( element, bSameButNotInSameClass, Res.MSG_FUNCTION_ALREADY_DEFINED, dfs.getMethodSignature(), getScriptPart() ) )
{
return;
}
if( !verify( element, dfs.isStatic() || !dfsExisting.isStatic(), Res.MSG_FUNCTION_ALREADY_DEFINED, dfs.getMethodSignature(), getScriptPart() ) )
{
// non-static method cannot override/shadow static
return;
}
boolean bClassAndReturnTypesCompatible = !GosuObjectUtil.equals( dfsExisting.getScriptPart(), dfs.getScriptPart() ) &&
returnTypesCompatible( dfsExisting, dfs );
if( verify( element, bClassAndReturnTypesCompatible, dfsExisting.isPrivate() ? Res.MSG_RENAME_METHOD : Res.MSG_FUNCTION_CLASH,
dfs.getName(), dfs.getScriptPart(), dfsExisting.getName(), dfsExisting.getScriptPart() ) )
{
boolean b = !dfsExisting.isFinal() && (gsClass == null || gsClass.getSupertype() == null || !gsClass.getSupertype().isFinal());
verify( element, b, Res.MSG_CANNOT_OVERRIDE_FINAL, dfsExisting.getName(), dfsExisting.getScriptPart() );
if( verify( element, !dfs.isStatic() || dfsExisting.isStatic(), Res.MSG_STATIC_METHOD_CANNOT_OVERRIDE, dfs.getName(), dfsExisting.getDeclaringTypeInfo().getName() ) )
{
if( !dfs.isStatic() && !dfsExisting.isStatic() )
{
IGosuClassInternal existingDeclaringClass = dfsExisting.getGosuClass();
boolean bDefaultMethodOverridesClassMethod = gsClass.isInterface() && !dfs.isAbstract() && existingDeclaringClass != null && existingDeclaringClass.isProxy() && existingDeclaringClass.getJavaType() == JavaTypes.IGOSU_OBJECT();
if( verify( element, !bDefaultMethodOverridesClassMethod, Res.MSG_OVERRIDES_OBJECT_METHOD, dfs.getName(), dfsExisting.getDeclaringTypeInfo().getName() ) )
{
if( !dfs.isOverride() )
{
boolean bIsConstructorName = gsClass != null && gsClass.getRelativeName().equals( dfs.getDisplayName() );
if( !dfsExisting.isPrivate() )
{
warn( element, bIsConstructorName || element instanceof VarStatement, Res.MSG_MISSING_OVERRIDE_MODIFIER, dfsExisting.getName(), dfsExisting.getScriptPart().getContainingTypeName() );
}
if( !bIsConstructorName )
{
// Set the override modifier when the modifier is missing
dfs.setOverride( true );
}
}
verifyNotWeakerAccess( element, dfs, dfsExisting );
verifySameNumberOfFunctionTypeVars( element, dfs, dfsExisting );
dfs.setSuperDfs( dfsExisting );
bValidOverrideFound = true;
}
}
}
}
}
}
else
{
// if the parameters do not match, but reify to the same IR types, it is an error
verify( element, !doParametersReifyToSameBytecodeType( dfs, dfsExisting ), Res.MSG_METHOD_REIFIES_TO_SAME_SIGNATURE_AS_ANOTHER_METHOD );
verify( element, !propertyTypeDiffers( dfs, dfsExisting ), Res.MSG_PROPERTY_OVERRIDES_WITH_INCOMPATIBLE_TYPE );
verify( element, dfs.getName().startsWith( "@" ) || // there's already an error re not allowing a default value for property setter
!dfs.hasOptionalParameters() && !dfsExisting.hasOptionalParameters(), Res.MSG_OVERLOADING_NOT_ALLOWED_WITH_OPTIONAL_PARAMS );
}
}
}
}
if( !bValidOverrideFound )
{
verifyOverrideNotOnMethodThatDoesNotExtend( element, dfs );
}
verifyReified( bValidOverrideFound, element, dfs );
verifyNoImplicitPropertyMethodConflicts( element, dfs );
}
private void verifyReified( boolean bValidOverrideFound, ParsedElement element, DynamicFunctionSymbol dfs )
{
if( bValidOverrideFound )
{
DynamicFunctionSymbol superDfs = dfs.getSuperDfs();
boolean bOverridesJavaMethod = IGosuClass.ProxyUtil.isProxy( superDfs.getDeclaringTypeInfo().getOwnersType() );
if( !bOverridesJavaMethod )
{
// If overriding a Gosu function, be sure both functions are reified or neither
verify( element, dfs.isReified() == superDfs.isReified(), Res.MSG_REIFIED_DONT_MATCH );
}
else
{
warn( element, dfs.isReified() == superDfs.isReified(), Res.MSG_REIFIED_DONT_MATCH_JAVA, dfs.getName(), superDfs.getDeclaringTypeInfo().getOwnersType().getRelativeName() );
}
}
if( dfs.isReified() )
{
verify( element, dfs.getType().isGenericType() ||
!dfs.isStatic() && dfs.getDeclaringTypeInfo().getOwnersType() instanceof IGosuEnhancement, Res.NOTHING_TO_REIFY );
}
}
// private boolean propertySettersAreCovariant( DynamicFunctionSymbol dfs, DynamicFunctionSymbol dfsExisting )
// {
//   return dfs.getDisplayName().startsWith( "@" ) &&
//          dfs.getArgTypes().length > 0 && dfsExisting.getArgTypes().length > 0 &&
//          dfsExisting.getArgTypes()[0].isAssignableFrom( dfs.getArgTypes()[0] );
// }
private boolean areParametersEquivalent_Enhancement( DynamicFunctionSymbol dfs1, DynamicFunctionSymbol dfs2 )
{
return !dfs1.isStatic() &&
dfs2.isStatic() &&
dfs1.getDeclaringTypeInfo().getOwnersType() instanceof IGosuEnhancement &&
areParametersEquivalent( dfs1, dfs2, ((IGosuEnhancement)dfs1.getDeclaringTypeInfo().getOwnersType()).getEnhancedType() );
}
// Since we compile private methods as internal (so inner classes have easier access) we must
// prevent subclasses from implicitly overriding "private" methods. Note this is only when
// the super class is in the same package as the subclass.
private List<IFunctionSymbol> maybeAddPrivateFunctionsIfSuperInSamePackage( String name, List<IFunctionSymbol> functions )
{
ICompilableTypeInternal gsClass = getGosuClass();
if( gsClass == null )
{
return functions;
}
IType supertype = gsClass.getSupertype();
if( gsClass instanceof IGosuClass && supertype != null )
{
if( TypeLord.getOuterMostEnclosingClass( supertype ).getNamespace().equals(
TypeLord.getOuterMostEnclosingClass( gsClass ).getNamespace() ) )
{
functions = new ArrayList<>( functions );
addAllNonstaticPrivateMethods( name, ((IGosuClassInternal)gsClass).getSuperClass(), functions );
}
}
return functions;
}
private void addAllNonstaticPrivateMethods( String name, IGosuClassInternal superClass, List<IFunctionSymbol> functions )
{
if( superClass == null )
{
return;
}
functions.addAll( superClass.getParseInfo().getMemberFunctions()
.values()
.stream()
.filter( e -> e.isPrivate() && e.getDisplayName().equals( name ) )
.collect( Collectors.toList() ) );
addAllNonstaticPrivateMethods( name, superClass.getSuperClass(), functions );
}
private boolean propertyTypeDiffers( DynamicFunctionSymbol dfs, DynamicFunctionSymbol dfsExisting )
{
return dfs.getDisplayName().startsWith( "@" ) &&
dfs.getArgTypes().length > 0 && dfsExisting.getArgTypes().length > 0 &&
dfs.getArgTypes()[0] != dfsExisting.getArgTypes()[0];
// !dfsExisting.getArgTypes()[0].isAssignableFrom( dfs.getArgTypes()[0] )
}
private void verifySameNumberOfFunctionTypeVars( ParsedElement element, DynamicFunctionSymbol dfs, DynamicFunctionSymbol dfsExisting )
{
FunctionType dfsType = (FunctionType)dfs.getType();
FunctionType existingDfsType = (FunctionType)dfsExisting.getType();
IGenericTypeVariable[] typeVars = dfsType.getTypeVariables();
IGenericTypeVariable[] existingTypeVars = existingDfsType.getTypeVariables();
verify( element, existingTypeVars.length == typeVars.length, Res.MSG_OVERRIDING_FUNCTION_MUST_HAVE_SAME_NUMBER_OF_TYPE_VARS, existingTypeVars.length );
}
private boolean areDFSsInSameNameSpace( IDynamicSymbol newDfs, IDynamicSymbol existingDfs )
{
IGosuClass newType = getOwningTypeForDfs( newDfs );
IType existingType = getOwningTypeForDfs( existingDfs );
if( newType == null )
{
return existingType == null;
}
else if( newType.isAnonymous() || newType.getEnclosingType() != null )
{
if( existingType instanceof IGosuEnhancement )
{
existingType = ((IGosuEnhancement)existingType).getEnhancedType();
}
if( IGosuClass.ProxyUtil.isProxy( existingType ) )
{
IType type = IGosuClass.ProxyUtil.getProxiedType( existingType );
return type.isAssignableFrom( newType ) || JavaTypes.IGOSU_OBJECT().equals( type );
}
else
{
return existingType.isAssignableFrom( newType );
}
}
else
{
return true;
}
}
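// Guards against a property accessor colliding with a like-named getX/setX/isX method, and
// against a getX/setX/isX method colliding with an existing property, including collisions
// that only appear after types are erased to bytecode.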
private void verifyNoImplicitPropertyMethodConflicts( ParsedElement element, DynamicFunctionSymbol dfs )
{
String name = dfs.getDisplayName();
if( name.startsWith( "@" ) )
{
String propName = name.substring( 1 );
if( dfs.getArgs().size() == 0 )
{
for( IFunctionSymbol func : getDfsDeclsForFunction( "get" + propName ) )
{
if( func instanceof DynamicFunctionSymbol )
{
DynamicFunctionSymbol existingDfs = (DynamicFunctionSymbol)func;
if( areDFSsInSameNameSpace( dfs, existingDfs ) )
{
verify( element, existingDfs.getArgs().size() != 0, Res.MSG_PROPERTY_AND_FUNCTION_CONFLICT, existingDfs.getName(), propName );
}
}
}
}
else
{
for( IFunctionSymbol func : getDfsDeclsForFunction( "set" + propName ) )
{
if( func instanceof DynamicFunctionSymbol )
{
DynamicFunctionSymbol existingDfs = (DynamicFunctionSymbol)func;
if( areDFSsInSameNameSpace( dfs, existingDfs ) )
{
verifyPropertySetterConflictsWithFunction( element, dfs, propName, existingDfs );
}
}
}
}
}
else
{
if( name.startsWith( "set" ) && dfs.getArgs().size() == 1 )
{
ISymbol symbol = getSymbolTable().getSymbol( name.substring( 3, name.length() ) );
if( symbol instanceof DynamicPropertySymbol )
{
DynamicPropertySymbol dps = (DynamicPropertySymbol)symbol;
if( areDFSsInSameNameSpace( dfs, dps ) )
{
verifyFunctionConflictsWithPropoertySetter( element, dfs, dps );
}
}
}
else
{
boolean bIs;
if( ((bIs = name.startsWith( "is" )) || name.startsWith( "get" )) && dfs.getArgs().size() == 0 )
{
ISymbol symbol = getSymbolTable().getSymbol( name.substring( bIs ? 2 : 3, name.length() ) );
if( symbol instanceof DynamicPropertySymbol )
{
DynamicPropertySymbol dps = (DynamicPropertySymbol)symbol;
if( areDFSsInSameNameSpace( dfs, dps ) )
{
DynamicFunctionSymbol getterDfs = dps.getGetterDfs();
verify( element, getterDfs == null || !NameResolver.getFunctionName( dfs ).equals( NameResolver.getFunctionName( getterDfs ) ),
Res.MSG_PROPERTY_AND_FUNCTION_CONFLICT, dfs.getName(), dps.getName() );
}
}
}
}
}
}
void verifyFunctionConflictsWithPropoertySetter( ParsedElement element, DynamicFunctionSymbol dfs, DynamicPropertySymbol dps )
{
if( dps.getSetterDfs() != null )
{
IType argType = dfs.getArgs().get( 0 ).getType();
if( argType.equals( dps.getType() ) )
{
verify( element, false, Res.MSG_PROPERTY_AND_FUNCTION_CONFLICT, dfs.getName(), dps.getName() );
}
else if( doTypesReifyToTheSameBytecodeType( argType, dps.getType() ) )
{
verify( element, false, Res.MSG_PROPERTY_AND_FUNCTION_CONFLICT_UPON_REIFICATION, dfs.getName(), dps.getName() );
}
}
}
void verifyPropertySetterConflictsWithFunction( ParsedElement element, DynamicFunctionSymbol dfs, String propName, DynamicFunctionSymbol existingDfs )
{
if( existingDfs.getArgs().size() == 1 )
{
IType argType = dfs.getArgs().get( 0 ).getType();
IType existingArgType = existingDfs.getArgs().get( 0 ).getType();
if( argType.equals( existingArgType ) )
{
verify( element, false, Res.MSG_PROPERTY_AND_FUNCTION_CONFLICT, existingDfs.getName(), propName );
}
if( doTypesReifyToTheSameBytecodeType( argType, existingArgType ) )
{
verify( element, false, Res.MSG_PROPERTY_AND_FUNCTION_CONFLICT_UPON_REIFICATION, existingDfs.getName(), propName );
}
}
}
private boolean returnTypesCompatible( DynamicFunctionSymbol dfsExisting, DynamicFunctionSymbol dfs )
{
IType existingReturnType = maybeResolveFunctionTypeVars( dfsExisting, dfsExisting.getReturnType() );
IType overrideReturnType = maybeResolveFunctionTypeVars( dfs, dfs.getReturnType() );
if( existingReturnType.isAssignableFrom( overrideReturnType ) || StandardCoercionManager.isStructurallyAssignable( existingReturnType, overrideReturnType ) )
{
return true;
}
else {
return dfs.getReturnType() instanceof ErrorType;
}
}
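// Compares two declared signatures for parameter-list equivalence after binding function
// type variables; extraParams, when supplied, are prepended to the first signature.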
private boolean areParametersEquivalent( IDynamicFunctionSymbol dfs, IDynamicFunctionSymbol dfsExisting, IType... extraParams )
{
IType[] args = ((FunctionType)dfs.getType()).getParameterTypes();
IType[] toArgs = ((FunctionType)dfsExisting.getType()).getParameterTypes();
if( extraParams != null && extraParams.length > 0 )
{
// These are inserted at the beginning, to handle the case in an enhancement where a static
// function and a non-static function can conflict in bytecode: both compile to static
// methods, and the non-static one has an implicit first param that is the enhanced type
IType[] argsPlus = new IType[args.length+extraParams.length];
System.arraycopy( extraParams, 0, argsPlus, 0, extraParams.length );
System.arraycopy( args, 0, argsPlus, extraParams.length, args.length );
args = argsPlus;
}
return _areParametersEquivalent( dfs, dfsExisting, args, toArgs );
}
private boolean _areParametersEquivalent( IDynamicFunctionSymbol dfs1, IDynamicFunctionSymbol dfs2, IType[] args, IType[] toArgs )
{
if( args == null || args.length == 0 )
{
return toArgs == null || toArgs.length == 0;
}
if( toArgs == null )
{
return false;
}
if( args.length != toArgs.length )
{
return false;
}
for( int i = 0; i < args.length; i++ )
{
IType argType = maybeResolveFunctionTypeVars( dfs1, args[i] );
IType toArgType = maybeResolveFunctionTypeVars( dfs2, toArgs[i] );
if( !argType.equals( toArgType ) )
{
// Note we use equals() check instead of == to handle non-loadable types e.g., TypeVariableTypes
return false;
}
}
return true;
}
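// Binds the function's type variables (or, for a constructor, the declaring class's type
// variables) occurring in the given type so that signatures can be compared; deleted types
// resolve to the error type.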
private IType maybeResolveFunctionTypeVars( IDynamicFunctionSymbol dfs, IType type )
{
if (TypeSystem.isDeleted(type)) {
return TypeSystem.getErrorType();
}
FunctionType funcType = (FunctionType)dfs.getType();
ArrayList<IType> functionTypeVars = new ArrayList<>();
IType declaringType = dfs.getScriptPart() == null ? null : dfs.getScriptPart().getContainingType();
boolean bConstructor = declaringType != null && dfs.getDisplayName().equals( declaringType.getRelativeName() );
if( bConstructor )
{
if( declaringType.isGenericType() && !declaringType.isParameterizedType() )
{
for( IGenericTypeVariable tv: declaringType.getGenericTypeVariables() )
{
functionTypeVars.add( tv.getTypeVariableDefinition().getType() );
}
}
}
else
{
for( IGenericTypeVariable tv : funcType.getTypeVariables() )
{
functionTypeVars.add( tv.getTypeVariableDefinition().getType() );
}
}
return TypeLord.boundTypes( type, functionTypeVars );
}
public boolean doParametersReifyToSameBytecodeType( IDynamicFunctionSymbol dfs, IDynamicFunctionSymbol dfsExisting )
{
IType[] toArgs = ((FunctionType) dfsExisting.getType()).getParameterTypes();
IType[] args = ((FunctionType)dfs.getType()).getParameterTypes();
if( args == null || args.length == 0 )
{
return toArgs == null || toArgs.length == 0;
}
if( toArgs == null )
{
return false;
}
if( args.length != toArgs.length )
{
return false;
}
for( int i = 0; i < args.length; i++ )
{
if( !doTypesReifyToTheSameBytecodeType( toArgs[i], args[i] ) )
{
return false;
}
}
return true;
}
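// Two types are considered equivalent here when their erased IR descriptors are equal,
// i.e., they compile to the same bytecode type.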
boolean doTypesReifyToTheSameBytecodeType( IType toArg, IType arg )
{
IRType toArgType = IRElement.maybeEraseStructuralType( null, IRTypeResolver.getDescriptor( toArg ) );
IRType argType = IRElement.maybeEraseStructuralType( null, IRTypeResolver.getDescriptor( arg ) );
return argType.equals( toArgType );
}
private IGosuClass getOwningTypeForDfs( IDynamicSymbol dfs )
{
if( dfs.getScriptPart() != null && dfs.getScriptPart().getContainingType() instanceof IGosuClass )
{
return (IGosuClass)dfs.getScriptPart().getContainingType();
}
else
{
return null;
}
}
private void verifyNotWeakerAccess( ParsedElement element, DynamicFunctionSymbol dfs, DynamicFunctionSymbol dfsExisting )
{
if( dfsExisting.isPublic() )
{
verify( element, dfs.isPublic(),
Res.MSG_ATTEMPTING_TO_ASSIGN_WEAKER_ACCESS_PRIVILEGES,
dfs.getName(), dfs.getScriptPart(),
dfsExisting.getName(), dfsExisting.getScriptPart() );
}
else if( dfsExisting.isProtected() )
{
verify( element, dfs.isPublic() || dfs.isProtected(),
Res.MSG_ATTEMPTING_TO_ASSIGN_WEAKER_ACCESS_PRIVILEGES,
dfs.getName(), dfs.getScriptPart(),
dfsExisting.getName(), dfsExisting.getScriptPart() );
}
else if( dfsExisting.isInternal() )
{
verify( element, dfs.isPublic() || dfs.isProtected() || dfs.isInternal(),
Res.MSG_ATTEMPTING_TO_ASSIGN_WEAKER_ACCESS_PRIVILEGES,
dfs.getName(), dfs.getScriptPart(),
dfsExisting.getName(), dfsExisting.getScriptPart() );
}
}
public ArrayList<ISymbol> parseParameterDeclarationList( IParsedElement element, boolean bStatic, List<IType> inferredArgumentTypes )
{
return parseParameterDeclarationList( element, bStatic, inferredArgumentTypes, false, false, false, false );
}
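// Parses a comma-separated parameter declaration list: optional annotations and 'final', the
// parameter name, then either an explicit ': Type', a type inferred from a default value
// ('= expr'), a block literal, or an externally inferred argument type. Verifies name
// clashes, default-value rules, and property accessor arity along the way.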
public ArrayList<ISymbol> parseParameterDeclarationList( IParsedElement element, boolean bStatic, List<IType> inferredArgumentTypes, boolean bProperty, boolean bGetter, boolean bEmpty, boolean bVarDynamicArg )
{
ArrayList<ISymbol> params = new ArrayList<>();
Token T = new Token();
int iParamPos = -1;
boolean bOptionalParamsStarted = false;
int iOffsetList = getTokenizer().getTokenStart();
int iLineNumList = getTokenizer().getLineNumber();
int iColumnList = getTokenizer().getTokenColumn();
do
{
iParamPos++;
int iOffsetParam = getTokenizer().getTokenStart();
int iLineNumParam = getTokenizer().getLineNumber();
int iColumnParam = getTokenizer().getTokenColumn();
List<IGosuAnnotation> annotations = parseLocalAnnotations( Collections.<IGosuAnnotation>emptyList() );
boolean bFinal = match( T, Keyword.KW_final );
int iOffsetArgIdentifier = getTokenizer().getTokenStart();
int iColumnArgIdentifier = getTokenizer().getTokenColumn();
int iLineArgIdentifier = getTokenizer().getLineNumber();
Token tokenBeforeParam = getTokenizer().getCurrentToken();
boolean bMatchColonWithoutName = false;
if( bEmpty || !verify( (ParsedElement)element, match( T, SourceCodeTokenizer.TT_WORD ), Res.MSG_EXPECTING_ARGS, "" ) )
{
bMatchColonWithoutName = match( null, ":", SourceCodeTokenizer.TT_OPERATOR, true );
if( !bMatchColonWithoutName )
{
break;
}
else
{
T._strValue = "";
}
}
ModifierListClause e = new ModifierListClause();
pushExpression( e );
boolean bZeroLength = tokenBeforeParam.getTokenStart() <= iOffsetParam;
setLocation( iOffsetParam, iLineNumParam, iColumnParam, bZeroLength, true );
popExpression();
String strArgIdentifier = T._strValue;
ParameterDeclaration parameterIdentifier = new ParameterDeclaration( strArgIdentifier );
addNameInDeclaration( strArgIdentifier, iOffsetArgIdentifier, iLineArgIdentifier, iColumnArgIdentifier, strArgIdentifier.length() > 0 || bMatchColonWithoutName );
ISymbol existingSymbol = _symTable.getSymbol( strArgIdentifier );
if( existingSymbol == null )
{
for( ISymbol symbol : params )
{
if( symbol.getName().equals( strArgIdentifier ) )
{
existingSymbol = symbol;
}
}
}
boolean bSymbolConflict = existingSymbol != null;
if( bStatic && existingSymbol instanceof DynamicSymbol )
{
bSymbolConflict = existingSymbol.isStatic();
}
if( !bSymbolConflict )
{
IGosuClassInternal anonClass = getParsingAnonymousClass();
if( anonClass != null && !isParsingAnnotation() )
{
// Conflicts with potential captured symbols?
bSymbolConflict = captureSymbol( anonClass, strArgIdentifier, null ) != null;
}
}
verify( parameterIdentifier, !bSymbolConflict, Res.MSG_VARIABLE_ALREADY_DEFINED, strArgIdentifier );
verify( parameterIdentifier, ! bProperty || bGetter || iParamPos == 0, Res.MSG_PROPERTY_SET_MUST_HAVE_ONE_PARAMETER );
IType argType;
Expression defExpr = null;
boolean bColonFound = match( null, ":", SourceCodeTokenizer.TT_OPERATOR );
boolean bEqualsFound = match( null, "=", SourceCodeTokenizer.TT_OPERATOR );
boolean bParenFound = match( null, null, '(', true );
if( (inferredArgumentTypes != null || bVarDynamicArg) && !bColonFound && !bEqualsFound && !bParenFound )
{
if( inferredArgumentTypes != null && inferredArgumentTypes.size() > iParamPos )
{
argType = inferredArgumentTypes.get(iParamPos);
}
else if( bVarDynamicArg )
{
argType = IDynamicType.instance();
}
else
{
argType = ErrorType.getInstance();
}
}
else
{
if( bParenFound )
{
parseBlockLiteral();
}
else
{
verify( parameterIdentifier, bColonFound || bEqualsFound, Res.MSG_EXPECTING_TYPE_FUNCTION_DEF );
if( bEqualsFound )
{
parseExpression( new ContextType( null, false, true ) );
}
else
{
parseTypeLiteral();
}
}
Expression expr = popExpression();
if( bEqualsFound )
{
defExpr = expr;
argType = defExpr.hasParseExceptions()
? JavaTypes.pVOID()
: expr.getType();
verify( expr, !(expr instanceof NullExpression), Res.MSG_VARIABLE_MUST_HAVE_NON_NULL_TYPE );
}
else
{
argType = ((TypeLiteral)expr).getType().getType();
}
verify( expr, bEqualsFound || argType != JavaTypes.pVOID(), Res.MSG_VOID_NOT_ALLOWED );
if( bColonFound && match( null, "=", SourceCodeTokenizer.TT_OPERATOR ) )
{
int iOffsetDef = getTokenizer().getTokenStart();
int iLineNumDef = getTokenizer().getLineNumber();
int iColumnDef = getTokenizer().getTokenColumn();
parseExpression( new ContextType( argType, false, true ) );
defExpr = popExpression();
defExpr = possiblyWrapWithImplicitCoercion( defExpr, argType );
pushExpression( defExpr );
setLocation( iOffsetDef, iLineNumDef, iColumnDef );
popExpression();
}
verify( defExpr, defExpr == null || !bProperty, Res.MSG_DEFAULT_VALUE_NOT_ALLOWED );
}
if( strArgIdentifier != null )
{
Symbol symbol = new TypedSymbol( strArgIdentifier, argType, _symTable, null, SymbolType.PARAMETER_DECLARATION );
if( defExpr != null )
{
verifyComparable( argType, defExpr );
verify( defExpr, defExpr.isCompileTimeConstant(), Res.MSG_COMPILE_TIME_CONSTANT_REQUIRED );
verify( (ParsedElement)element, argType != JavaTypes.pVOID() || !defExpr.hasParseExceptions(),
Res.MSG_PARAM_TYPE_CANT_BE_INFERRED_FROM_LATE_BOUND_EXPRESSION );
symbol.setDefaultValueExpression( defExpr );
bOptionalParamsStarted = true;
}
else
{
verify( parameterIdentifier, !bOptionalParamsStarted, Res.MSG_EXPECTING_DEFAULT_VALUE );
}
symbol.setFinal( bFinal );
symbol.getModifierInfo().setAnnotations( annotations );
params.add( symbol );
parameterIdentifier.setType( argType );
pushExpression( parameterIdentifier );
setLocation( iOffsetParam, iLineNumParam, iColumnParam, true );
popExpression();
if( getGosuClass() instanceof IGosuClassInternal && ((IGosuClassInternal)getGosuClass()).isCompilingDefinitions() )
{
verifyModifiers( parameterIdentifier, symbol.getModifierInfo(), UsageTarget.ParameterTarget );
}
}
}
while( match( null, ',' ) );
ParameterListClause e = new ParameterListClause();
pushExpression( e );
boolean bZeroLength = getTokenizer().getTokenStart() <= iOffsetList;
setLocation( bZeroLength ? getTokenizer().getPriorToken().getTokenEnd() : iOffsetList, iLineNumList, iColumnList, bZeroLength, true );
popExpression();
params.trimToSize();
return params;
}
private List<IGosuAnnotation> parseLocalAnnotations( List<IGosuAnnotation> annotations ) {
while( match( null, null, '@', true ) )
{
if( getOwner() == null )
{
match( null, '@' );
throw new IllegalStateException( "Found null owning parser" );
}
if( annotations.isEmpty() )
{
annotations = new ArrayList<>( 2 );
}
parseAnnotation( annotations );
}
return annotations;
}
// type-variables
// < <type-variable-list> >
List<ITypeVariableDefinitionExpression> parseTypeVariableDefs( ParsedElement parsedElem, boolean bFunction, List<TypeVariableDefinitionImpl> typeVarDefListFromDecl )
{
int iOffset = getTokenizer().getTokenStart();
int iLineNum = getTokenizer().getLineNumber();
int iColumn = getTokenizer().getTokenColumn();
if( !match( null, "<", SourceCodeTokenizer.TT_OPERATOR ) )
{
return Collections.emptyList();
}
List<ITypeVariableDefinitionExpression> typeVarDefList = parseTypeVariableDefList( parsedElem, bFunction, typeVarDefListFromDecl );
if( verify( parsedElem, match( null, ">", SourceCodeTokenizer.TT_OPERATOR ),
Res.MSG_EXPECTING_CLOSING_ANGLE_BRACKET_FOR_TYPE_VAR_LIST ) )
{
verify( parsedElem, getGosuClass() == null || !(((IGosuClass)getGosuClass()).isAnnotation() && JavaTypes.ANNOTATION().isAssignableFrom( getGosuClass() )), Res.MSG_GENERIC_ANNOTATIONS_NOT_SUPPORTED );
}
// For editors only
TypeVariableListClause e = new TypeVariableListClause( typeVarDefList );
pushExpression( e );
setLocation( iOffset, iLineNum, iColumn, true );
popExpression();
return typeVarDefList;
}
// type-variable-list
// <type-variable>
// <type-variable-list> , <type-variable>
List<ITypeVariableDefinitionExpression> parseTypeVariableDefList( ParsedElement parsedElem, boolean bForFunction, List<TypeVariableDefinitionImpl> typeVarDefListFromDecl )
{
List<ITypeVariableDefinitionExpression> typeVarDefList = new ArrayList<>();
Map<String, ITypeVariableDefinition> typeVarMap = getTypeVariables();
List<TypeVariableDefinition> defs = new ArrayList<>();
if( typeVarDefListFromDecl != null )
{
for( int i = 0; i < typeVarDefListFromDecl.size(); i++ )
{
TypeVariableDefinitionImpl tvd = typeVarDefListFromDecl.get( i );
TypeVariableDefinition typeVarDef = new TypeVariableDefinition( getEnclosingType(), bForFunction );
defs.add( typeVarDef );
if( typeVarDefListFromDecl != null && !typeVarDefListFromDecl.isEmpty() )
{
typeVarDef.setTypeVarDef( tvd );
}
if( !typeVarExists( typeVarMap, typeVarDef ) )
{
// Add all type vars ahead of declaration parsing to enable forward referencing of typevars
getTypeVariables().put( tvd.getName(), typeVarDef );
}
}
}
int i = 0;
do
{
TypeVariableDefinition typeVarDef = defs.isEmpty()
? new TypeVariableDefinition( getEnclosingType(), bForFunction )
: defs.get( i++ );
parseTypeVariableDefinition( parsedElem, typeVarDef, defs.isEmpty() );
typeVarDef = (TypeVariableDefinition)popExpression();
for( ITypeVariableDefinition csr : _typeVarsByName.values() )
{
if( !verify( typeVarDef, !csr.getName().equals( typeVarDef.getName() ) ||
((TypeVariableDefinition)csr).getLocation().getExtent() == typeVarDef.getLocation().getExtent(),
Res.MSG_VARIABLE_ALREADY_DEFINED, typeVarDef.getName() ) )
{
break;
}
}
typeVarDefList.add( typeVarDef );
}
while( match( null, ',' ) );
return typeVarDefList;
}
private boolean typeVarExists( Map<String, ITypeVariableDefinition> typeVarMap, TypeVariableDefinition typeVarDef )
{
if( !typeVarMap.containsKey( typeVarDef.getName() ) )
{
return false;
}
if( isParsingStaticFeature() )
{
ITypeVariableDefinition v = typeVarMap.get( typeVarDef.getName() );
if( v == null || !v.getEnclosingType().equals( typeVarDef.getEnclosingType() ) )
{
return false;
}
}
return true;
}
// type-variable
// <identifier> [extends <type-literal>]
void parseTypeVariableDefinition( ParsedElement parsedElem, TypeVariableDefinition typeVarDef, boolean bFirstPass )
{
int iOffset = getTokenizer().getTokenStart();
int iLineNum = getTokenizer().getLineNumber();
int iColumn = getTokenizer().getTokenColumn();
if( _parseTypeVariableDefinition( parsedElem, typeVarDef, bFirstPass ) )
{
setLocation( iOffset, iLineNum, iColumn );
}
}
boolean _parseTypeVariableDefinition( ParsedElement parsedElem, TypeVariableDefinition typeVarDef, boolean bFirstPass )
{
Token T = new Token();
parseVariance( parsedElem, typeVarDef );
if( verify( parsedElem, match( T, SourceCodeTokenizer.TT_WORD ), Res.MSG_EXPECTING_IDENTIFIER_EXISTS ) )
{
typeVarDef.setName( T._strValue );
Map<String, ITypeVariableDefinition> typeVarMap = getTypeVariables();
if( !typeVarExists( typeVarMap, typeVarDef ) )
{
getTypeVariables().put( typeVarDef.getName(), typeVarDef );
}
int iOffset = getTokenizer().getTokenStart();
int iLineNum = getTokenizer().getLineNumber();
int iColumn = getTokenizer().getTokenColumn();
IType boundingType = null;
boolean bExtends;
if( bExtends = match( null, Keyword.KW_extends ) )
{
if( bFirstPass )
{
boundingType = typeVarDef.getType();
typeVarDef.setBoundingType( boundingType );
eatTypeLiteral();
}
else
{
typeVarDef.setBoundingType( PENDING_BOUNDING_TYPE );
parseTypeLiteral();
boxTypeLiteralsType( (TypeLiteral)peekExpression() );
TypeLiteral typeLiteral = (TypeLiteral)popExpression();
boundingType = typeLiteral.getType().getType();
if( verify( typeLiteral, boundingType != typeVarDef.getType(), Res.MSG_CYCLIC_INHERITANCE, boundingType.getRelativeName() ) )
{
typeVarDef.setBoundingType( boundingType );
}
}
}
// For editors only
TypeVariableExtendsListClause e = new TypeVariableExtendsListClause( boundingType );
pushExpression( e );
// Note for a zero-length extends clause we must shift it to the left one char to avoid having it follow the enclosing TypeVariableListClause
setLocation( iOffset - (bExtends ? 0 : 1), iLineNum, iColumn, !bExtends, true );
popExpression();
}
else
{
typeVarDef.setName( "" );
verify( typeVarDef, false, Res.MSG_ERRANT_TYPE_VAR );
Map<String, ITypeVariableDefinition> typeVarMap = getTypeVariables();
if( !typeVarMap.containsKey( typeVarDef.getName() ) )
{
getTypeVariables().put( typeVarDef.getName(), typeVarDef );
}
pushExpression( typeVarDef );
// Set the location to zero length at the end of the last token
Token priorT = getTokenizer().getPriorToken();
setLocation( priorT.getTokenEnd(), priorT.getLine(), priorT.getTokenColumn() );
return false;
}
pushExpression( typeVarDef );
return true;
}
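// Parses optional 'in'/'out' variance modifiers on a class-level type variable: 'out' marks
// it covariant, 'in' contravariant, and both together invariant; duplicate modifiers and use
// on function type variables are reported as errors.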
private void parseVariance( ParsedElement parsedElem, TypeVariableDefinition typeVarDef )
{
int iOffsetList = getTokenizer().getTokenStart();
int iLineNumList = getTokenizer().getLineNumber();
int iColumnList = getTokenizer().getTokenColumn();
boolean bCovariant = false;
boolean bContravariant = false;
while( true )
{
Token token = getTokenizer().getCurrentToken();
if( Keyword.KW_in == token.getKeyword() )
{
getTokenizer().nextToken();
if( verify( parsedElem, !typeVarDef.getType().isFunctionStatement(), Res.MSG_UNEXPECTED_TOKEN, Keyword.KW_in ) )
{
if( verify( parsedElem, !bContravariant, Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_in, Keyword.KW_in ) )
{
bContravariant = true;
if( typeVarDef.getVariance() == Variance.COVARIANT )
{
typeVarDef.setVariance( Variance.INVARIANT );
}
else
{
typeVarDef.setVariance( Variance.CONTRAVARIANT );
}
}
}
}
else if( Keyword.KW_out == token.getKeyword() )
{
getTokenizer().nextToken();
if( verify( parsedElem, !typeVarDef.getType().isFunctionStatement(), Res.MSG_UNEXPECTED_TOKEN, Keyword.KW_out ) )
{
if( verify( parsedElem, !bCovariant, Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_out, Keyword.KW_out ) )
{
bCovariant = true;
if( typeVarDef.getVariance() == Variance.CONTRAVARIANT )
{
typeVarDef.setVariance( Variance.INVARIANT );
}
else
{
typeVarDef.setVariance( Variance.COVARIANT );
}
}
}
}
else
{
break;
}
}
pushModifierList( iOffsetList, iLineNumList, iColumnList );
}
private IType getEnclosingType()
{
if( isParsingFunction() )
{
return peekParsingFunction();
}
else if( isParsingStaticFeature() )
{
return ErrorType.getInstance( "decl_Static_Function" );
}
return getScriptPart() == null ? null : getScriptPart().getContainingType();
}
@Override
protected void pushExpression( Expression e )
{
assert e != null;
maybeVerifyDoubleLiterals( e );
_stack.push( e );
}
private void maybeVerifyDoubleLiterals( Expression e )
{
if( e instanceof AdditiveExpression || e instanceof MultiplicativeExpression )
{
IArithmeticExpression ae = (IArithmeticExpression) e;
maybeVerifyDoubleLiteral( ae.getLHS(), ae.getRHS() );
maybeVerifyDoubleLiteral( ae.getRHS(), ae.getLHS() );
}
}
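// When a double literal is mixed with a BigDecimal, BigInteger, or Rational operand, flags
// the literal if its decimal representation cannot be preserved as a double, steering the
// user toward the 'bd' suffix.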
private void maybeVerifyDoubleLiteral( IExpression oneSide, IExpression otherSide )
{
if( (JavaTypes.BIG_DECIMAL().equals( oneSide.getType() ) || JavaTypes.RATIONAL().equals( oneSide.getType() ) || JavaTypes.BIG_INTEGER().equals( oneSide.getType() )) && JavaTypes.pDOUBLE().equals( otherSide.getType() ))
{
if( otherSide instanceof UnaryExpression )
{
otherSide = ((UnaryExpression)otherSide).getExpression();
}
if( otherSide instanceof NumericLiteral )
{
NumericLiteral nl = (NumericLiteral)otherSide;
boolean repsAreIdentical = new BigDecimal( nl.getStrValue() ).equals( CommonServices.getCoercionManager().makeBigDecimalFrom( nl.getValue() ) );
verify( (ParsedElement)otherSide, repsAreIdentical, Res.MSG_LOSS_OF_PRECISION_IN_NUMERIC_LITERAL, nl.getStrValue() + "bd" );
}
}
}
@Override
public Expression popExpression()
{
return (Expression)_stack.pop();
}
@Override
public void setTokenizer( ISourceCodeTokenizer tokenizer )
{
_tokenizer = (SourceCodeTokenizer)tokenizer;
}
@Override
protected Expression peekExpression()
{
IParsedElement elem = peekParsedElement();
return elem instanceof Expression ? (Expression)elem : null;
}
protected ParsedElement peekParsedElement()
{
if( _stack.isEmpty() )
{
return null;
}
return _stack.peek();
}
@Override
protected void pushStatement( Statement stmt )
{
_stack.push( stmt );
}
@Override
protected Statement popStatement()
{
ParsedElement stmt = _stack.pop();
return (Statement)stmt;
}
@Override
protected Statement peekStatement()
{
IParsedElement elem = peekParsedElement();
return elem instanceof Statement ? (Statement)elem : null;
}
protected void pushDynamicFunctionSymbol( DynamicFunctionSymbol stmt )
{
_stackDFS.push( stmt );
}
protected DynamicFunctionSymbol popDynamicFunctionSymbol()
{
return _stackDFS.pop();
}
protected DynamicFunctionSymbol peekDynamicFunctionSymbol()
{
if( _stackDFS.isEmpty() )
{
return null;
}
return _stackDFS.peek();
}
protected void clearDfsStack()
{
_stackDFS.clear();
}
public void putDfsDeclsInTable( ISymbolTable table )
{
if( table == null )
{
return;
}
for( Iterator iterator = table.getSymbols().values().iterator(); iterator.hasNext(); )
{
ISymbol symbol = (ISymbol)iterator.next();
if( symbol instanceof IDynamicFunctionSymbol )
{
putDfsDeclInSetByName( (IDynamicFunctionSymbol)symbol );
}
}
}
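// Registers a function declaration under its display name; an equal declaration already in
// the list is replaced in place, otherwise the new one is appended.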
public void putDfsDeclInSetByName( IDynamicFunctionSymbol dfs )
{
String displayName = dfs.getDisplayName();
List<IFunctionSymbol> dfsDecls = _dfsDeclByName.get( displayName );
if( dfsDecls == null )
{
dfsDecls = new ArrayList<>( 2 );
try
{
_dfsDeclByName.put( displayName, dfsDecls );
dfsDecls.add( dfs );
}
catch( Exception e )
{
throw new RuntimeException( "Map type: " + _dfsDeclByName.getClass().getName(), e );
}
}
else
{
int iIndex = dfsDecls.indexOf( dfs );
if( iIndex >= 0 )
{
dfsDecls.set( iIndex, dfs );
}
else
{
dfsDecls.add( dfs );
}
}
}
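// Computes the rename index for an errant duplicate symbol so it can coexist in the symbol
// table for error recovery: -1 when there is no clash, otherwise one more than the highest
// duplicate index already assigned for that name.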
public int nextIndexOfErrantDuplicateDynamicSymbol( IDynamicSymbol ds, Collection<? extends ISymbol> symbols, boolean bCheckContains )
{
if( symbols == null )
{
return -1;
}
int iMax = -1;
if( !bCheckContains || symbolIn( ds, symbols ) )
{
for( ISymbol csr : symbols )
{
boolean bInjected = csr instanceof IInjectedSymbol;
if( !(csr instanceof IDynamicSymbol) && !bInjected )
{
continue;
}
String strName = csr.getName();
if( csr.getGosuClass() == ds.getGosuClass() || bInjected )
{
if( iMax < 0 && strName.equals( ds.getName() ) )
{
iMax = 0;
}
else if( strName.toLowerCase().contains( "_duplicate_" + ds.getName().toLowerCase() ) )
{
int iIndex = Integer.parseInt( strName.substring( 0, strName.indexOf( '_' ) ) );
if( iIndex > iMax )
{
iMax = iIndex;
}
}
}
}
}
return iMax < 0 ? iMax : iMax+1;
}
private boolean symbolIn( IDynamicSymbol ds, Collection<? extends ISymbol> symbols )
{
for( ISymbol s : symbols )
{
if( s.getName().equals( ds.getName() ) )
{
return true;
}
}
return false;
}
public void setDfsDeclInSetByName( Map<String, List<IFunctionSymbol>> dfsDecl )
{
_dfsDeclByName = dfsDecl;
}
protected void newDfsDeclInSetByName()
{
_dfsDeclByName = new HashMap<>();
}
public Map<String, List<IFunctionSymbol>> getDfsDecls()
{
return _dfsDeclByName;
}
protected List<IFunctionType> getFunctionTypesForName( String strFunctionName )
{
List<IFunctionSymbol> list = getDfsDeclsForFunction( strFunctionName );
List<IFunctionType> listOfTypes = new ArrayList<>( list.size() );
for (IFunctionSymbol dfs : list)
{
listOfTypes.add((FunctionType) dfs.getType());
}
return listOfTypes;
}
protected TypeLiteral resolveTypeLiteral( String[] T )
{
return resolveTypeLiteral( T, true, false );
}
protected TypeLiteral resolveTypeLiteral( String[] T, boolean bRelative, boolean bInterface )
{
String strTypeName = T[0] == null ? "" : T[0];
return resolveTypeLiteral( strTypeName, bRelative, bInterface );
}
protected List<IFunctionSymbol> getDfsDeclsForFunction( String strFunctionName )
{
List<IFunctionSymbol> setOfDfsDecls = _dfsDeclByName.get( strFunctionName );
return setOfDfsDecls == null ? Collections.<IFunctionSymbol>emptyList() : setOfDfsDecls;
}
/**
* Resolves the type literal given by strTypeName. Relative names are resolved against type
* variables in scope, enclosing classes, and this parser's type uses map.
* @param strTypeName the name of the type to resolve, optionally with array dimensions, e.g. "String[]"
*/
public TypeLiteral resolveTypeLiteral( String strTypeName )
{
return resolveTypeLiteral( strTypeName, true, false );
}
public TypeLiteral resolveTypeLiteral( String strTypeName, boolean bRelative, boolean bInterface )
{
int iArrayDims = 0;
if( strTypeName.length() > 0 && strTypeName.charAt( strTypeName.length()-1 ) == ']' )
{
int iFirstBracketIndex = strTypeName.indexOf( '[' );
// Compute the array dimensions before stripping the brackets from the type name
iArrayDims = (strTypeName.length() - iFirstBracketIndex)/2;
strTypeName = strTypeName.substring( 0, iFirstBracketIndex );
}
IType intrType;
boolean bClassTypeVar = false;
ITypeVariableDefinition typeVarDef = getTypeVarDef( strTypeName );
if( typeVarDef != null )
{
bClassTypeVar = !typeVarDef.getType().isFunctionStatement();
intrType = typeVarDef.getType();
if( intrType == null )
{
intrType = resolveTypeByRelativeName( strTypeName );
}
}
else
{
if( strTypeName.indexOf( '.' ) >= 0 )
{
intrType = resolveTypeName( strTypeName, false );
if( intrType == null )
{
intrType = resolveTypeByRelativeName( strTypeName );
}
}
else
{
intrType = resolveTypeByRelativeName( strTypeName );
if( intrType == null )
{
intrType = resolveTypeName( strTypeName, bRelative );
}
}
}
if( intrType == null )
{
intrType = ErrorType.getInstance( strTypeName );
}
IType finalType = intrType;
for( int i = 0; i < iArrayDims; i++ )
{
finalType = finalType.getArrayType();
}
if (TypeSystem.isDeleted(finalType)) {
finalType = TypeSystem.getErrorType();
}
TypeLiteral typeLiteral = bInterface
? new InterfaceTypeLiteral( MetaType.getLiteral( finalType ), _ignoreTypeDeprecation > 0 )
: new TypeLiteral( MetaType.getLiteral( finalType ), _ignoreTypeDeprecation > 0 );
verify( typeLiteral, !bClassTypeVar || !isParsingStaticFeature() || isParsingConstructor(), Res.MSG_CANNOT_REFERENCE_CLASS_TYPE_VAR_IN_STATIC_CONTEXT );
if( verify( typeLiteral, !(intrType instanceof ErrorType) || IErrorType.NAME.equals( strTypeName ), Res.MSG_INVALID_TYPE, strTypeName ) )
{
verifyCase( typeLiteral, strTypeName, intrType.getName(), Res.MSG_TYPE_CASE_MISMATCH, true );
}
return typeLiteral;
}
private IType resolveTypeName( String strTypeName, boolean bRelative )
{
IType type = _typeCache.get( strTypeName );
if( type == null )
{
if( bRelative )
{
type = TypeLoaderAccess.instance().getTypeByRelativeNameIfValid_NoGenerics( strTypeName, getTypeUsesMap() );
}
else
{
type = TypeLoaderAccess.instance().getByFullNameIfValid( strTypeName );
}
_typeCache.put( strTypeName, type == null ? notfound : type );
}
return type == notfound ? null : type;
}
private ITypeVariableDefinition getTypeVarDef( String strTypeName )
{
ITypeVariableDefinition typeVarDef = getTypeVariables().get( strTypeName );
if( typeVarDef != null && getGosuClass() != null && getGosuClass().isStatic() && TypeLord.encloses( typeVarDef.getEnclosingType(), getGosuClass() ) )
{
// Static inner class cannot access enclosing type vars
typeVarDef = null;
}
if( typeVarDef == null )
{
typeVarDef = getEnclosingTypeVars().get( strTypeName );
}
return typeVarDef;
}
//## do we still need this method since inner class parsing re-uses the enclosing class' owner/parser
private Map<String, TypeVariableDefinition> getEnclosingTypeVars()
{
ICompilableType gsClass = getGosuClass();
if( gsClass == null )
{
return Collections.emptyMap();
}
Map<String, TypeVariableDefinition> mapTypeVarDefByName = new HashMap<>( 2 );
while( !gsClass.isStatic() && gsClass.getEnclosingType() != null )
{
// Note we don't resolve type vars defined in outer for static inner classes.
// This is because we do not maintain separate inner classes for each
// parameterization of a generic outer class
ICompilableType enclosingType = gsClass.getEnclosingType();
for( IGenericTypeVariable gtv : enclosingType.getGenericTypeVariables() )
{
Map<String, ITypeVariableDefinition> typeVarMap = getTypeVariables();
if( !typeVarMap.containsKey( gtv.getTypeVariableDefinition().getName() ) )
{
getTypeVariables().put( gtv.getName(), gtv.getTypeVariableDefinition() );
}
}
gsClass = enclosingType;
}
return mapTypeVarDefByName;
}
private IType resolveTypeByRelativeName( String strTypeName )
{
ICompilableType gsClass = getGosuClass();
if( gsClass == null )
{
return null;
}
return gsClass.resolveRelativeInnerClass( strTypeName, false );
}
public HashMap<String, ITypeVariableDefinition> getTypeVariables()
{
return _typeVarsByName;
}
protected void setTypeVariables( HashMap<String, ITypeVariableDefinition> map )
{
_typeVarsByName = map;
}
public IGosuClassInternal parseClass( String strQualifiedClassName, ISourceFileHandle sourceFile, boolean bThrowOnWarnings, boolean bFullyCompile ) throws ParseResultsException
{
GosuClassTypeLoader classLoader;
if (!ExecutionMode.isIDE()) {
classLoader = GosuClassTypeLoader.getDefaultClassLoader(TypeSystem.getGlobalModule());
} else {
IFile file = sourceFile.getFile();
IModule module = TypeSystem.getExecutionEnvironment().getModule(file);
if (module == null) {
// these are files outside of the typesystem (i.e. not in any source root)
classLoader = GosuClassTypeLoader.getDefaultClassLoader(TypeSystem.getGlobalModule());
} else {
classLoader = module.getModuleTypeLoader().getTypeLoader(GosuClassTypeLoader.class);
}
}
IGosuClassInternal gsClass = (IGosuClassInternal)classLoader.makeNewClass(sourceFile, _symTable);
gsClass.setEditorParser(this);
gsClass.setCreateEditorParser(isEditorParser());
try
{
if( bFullyCompile )
{
gsClass.compileDefinitionsIfNeeded( true );
}
else
{
gsClass.compileDeclarationsIfNeeded();
}
}
catch( ErrantGosuClassException e )
{
//ignore
}
//noinspection ThrowableResultOfMethodCallIgnored
if( gsClass.getParseResultsException() != null )
{
//noinspection ThrowableResultOfMethodCallIgnored
if( gsClass.getParseResultsException().hasParseExceptions() || bThrowOnWarnings )
{
throw gsClass.getParseResultsException();
}
}
return gsClass;
}
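// Resolves the named method on the given type to an IFunctionType, optionally requiring a
// matching argument count and collecting all overload candidates; error and dynamic types
// are handled specially, and unresolvable calls raise a ParseException.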
public IFunctionType getFunctionType( IType classBean, String functionName, Expression[] eArgs, List<IFunctionType> listAllMatchingMethods, GosuParser parser, boolean bMatchParamTypes ) throws ParseException
{
if( classBean == null )
{
throw new ParseException( parser == null ? null : parser.makeFullParserState(), Res.MSG_BEAN_CLASS_IS_NULL );
}
if( functionName == null )
{
throw new ParseException( parser == null ? null : parser.makeFullParserState(), Res.MSG_BEAN_MEMBER_PATH_IS_NULL );
}
ITypeInfo beanInfo = classBean.getTypeInfo();
if( beanInfo == null )
{
throw new ParseException( parser == null ? null : parser.makeFullParserState(), Res.MSG_NO_EXPLICIT_TYPE_INFO_FOUND, classBean.getName() );
}
if( ErrorType.shouldHandleAsErrorType( classBean ) )
{
return ErrorType.getInstance().getErrorTypeFunctionType( eArgs, functionName, listAllMatchingMethods );
}
boolean bFoundMethodWithName = false;
MethodList methods = BeanAccess.getMethods( beanInfo, getGosuClass() == null ? JavaTypes.OBJECT() : getGosuClass() );
if( methods != null )
{
DynamicArray<? extends IMethodInfo> theMethods = methods.getMethods(functionName);
for (int i = 0; i < theMethods.size; i++) {
IMethodInfo method = (IMethodInfo) theMethods.data[i];
if( BeanAccess.isDescriptorHidden( method ) )
{
continue;
}
bFoundMethodWithName = true;
if( !bMatchParamTypes )
{
return new FunctionType(method);
}
IParameterInfo[] paramTypes = method.getParameters();
if( eArgs == null || paramTypes.length == eArgs.length )
{
if( listAllMatchingMethods == null )
{
return new FunctionType(method);
}
listAllMatchingMethods.add(new FunctionType(method));
}
}
}
if( listAllMatchingMethods != null && listAllMatchingMethods.size() > 0 )
{
return listAllMatchingMethods.get( 0 );
}
if( bFoundMethodWithName )
{
throw new ParseException( parser == null ? null : parser.makeFullParserState(), Res.MSG_WRONG_NUMBER_OF_ARGS_FOR_METHOD_ON_CLASS, functionName, TypeSystem.getUnqualifiedClassName( classBean ) );
}
else
{
checkForStaticMethod( classBean, eArgs, functionName, parser );
if( classBean.isDynamic() )
{
IType[] params = null;
if( eArgs != null )
{
params = new IType[eArgs.length];
for( int i = 0; i < eArgs.length; i++ )
{
params[i] = eArgs[i].getType();
}
}
IMethodInfo mi = classBean.getTypeInfo().getMethod( functionName, params );
if( mi != null )
{
FunctionType funcType = new FunctionType( mi );
if( listAllMatchingMethods != null )
{
listAllMatchingMethods.add( funcType );
}
return funcType;
}
}
throw new ParseException( parser == null ? null : parser.makeFullParserState(), Res.MSG_NO_METHOD_DESCRIPTOR_FOUND_FOR_METHOD, functionName, TypeSystem.getUnqualifiedClassName( classBean ) );
}
}
private void checkForStaticMethod( IType classBean, Expression[] eArgs, String strMethod, GosuParser parserState )
throws ParseException
{
if( classBean instanceof MetaType)
{
IType referredType = ((MetaType)classBean).getType();
IType[] paramTypes = new IType[eArgs == null ? 0 : eArgs.length];
for( int i = 0; i < paramTypes.length; i++ )
{
paramTypes[i] = eArgs[i].getType();
}
IMethodInfo mi = referredType.getTypeInfo().getCallableMethod( strMethod, paramTypes );
if( mi != null && !mi.isStatic() )
{
throw new ParseException( parserState == null ? null : parserState.makeFullParserState(), Res.MSG_METHOD_IS_NOT_STATIC, strMethod, TypeSystem.getUnqualifiedClassName( classBean ) );
}
}
else
{
IType referredType = MetaType.get( classBean );
IType[] paramTypes = new IType[eArgs == null ? 0 : eArgs.length];
for( int i = 0; i < paramTypes.length; i++ )
{
paramTypes[i] = eArgs[i].getType();
}
IMethodInfo mi = referredType.getTypeInfo().getCallableMethod( strMethod, paramTypes );
if( mi != null && !mi.isStatic() )
{
throw new ParseException( parserState == null ? null : parserState.makeFullParserState(), Res.MSG_METHOD_IS_STATIC, strMethod, TypeSystem.getUnqualifiedClassName( classBean ) );
}
}
}
private IInvocableType inferFunctionType( IInvocableType funcType, List<? extends IExpression> eArgs, boolean bUseCtx, TypeVarToTypeMap inferenceMap )
{
if( funcType instanceof IFunctionType && funcType.isGenericType() )
{
return inferFunction( funcType, eArgs, bUseCtx, inferenceMap );
}
else if( funcType instanceof ConstructorType )
{
return inferConstructor( (ConstructorType)funcType, inferenceMap );
}
else
{
return funcType;
}
}
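// Maps a constructor declared on a raw generic type to the corresponding constructor on the
// parameterization implied by the inference map; returns the original ConstructorType when
// no mapping applies.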
private IInvocableType inferConstructor( ConstructorType ctorType, TypeVarToTypeMap inferenceMap )
{
IType declaringType = ctorType.getDeclaringType();
if( declaringType.isGenericType() && !declaringType.isParameterizedType() )
{
IType actualDeclaringType = TypeLord.makeParameteredType( declaringType, inferenceMap );
if( actualDeclaringType != null )
{
List<? extends IConstructorInfo> genDeclaredConstructors = ((IRelativeTypeInfo)declaringType.getTypeInfo()).getDeclaredConstructors();
for( int i = 0; i < genDeclaredConstructors.size(); i++ )
{
IConstructorInfo rawCtor = genDeclaredConstructors.get( i );
if( new ConstructorType( rawCtor ).equals( ctorType ) )
{
List<? extends IConstructorInfo> paramDeclaredConstructors = ((IRelativeTypeInfo)actualDeclaringType.getTypeInfo()).getDeclaredConstructors();
for( IConstructorInfo csr: paramDeclaredConstructors )
{
if( csr.hasRawConstructor( rawCtor ) )
{
return new ConstructorType( csr );
}
}
break;
}
}
}
}
else if( declaringType.isParameterizedType() )
{
IConstructorInfo ci = ctorType.getConstructor();
List<? extends IConstructorInfo> constructors = ((IRelativeTypeInfo)declaringType.getTypeInfo()).getConstructors( getGosuClass() );
int index = constructors.indexOf( ci );
if( index >= 0 )
{
IType actualType = TypeLord.getActualType( declaringType, inferenceMap, true );
List<? extends IConstructorInfo> actualTypeCtors = ((IRelativeTypeInfo)actualType.getTypeInfo()).getConstructors( getGosuClass() );
ci = actualTypeCtors.get( index );
ctorType = new ConstructorType( ci );
}
}
return ctorType;
}
private IInvocableType inferFunction( IInvocableType funcType, List<? extends IExpression> eArgs, boolean bUseCtx, TypeVarToTypeMap inferenceMap )
{
IType[] argTypes = new IType[eArgs.size()];
for( int i = 0; i < eArgs.size(); i++ )
{
argTypes[i] = eArgs.get( i ).getType();
}
for( int i = 0; i < funcType.getParameterTypes().length; i++ )
{
IType paramType = funcType.getParameterTypes()[i];
if( i < argTypes.length )
{
IType argType = argTypes[i];
IType boundArgType = TypeLord.boundTypes( paramType, getCurrentlyInferringFunctionTypeVars() );
ICoercer coercer = CommonServices.getCoercionManager().resolveCoercerStatically( boundArgType, argType );
if( coercer instanceof IResolvingCoercer )
{
argTypes[i] = ((IResolvingCoercer)coercer).resolveType( paramType, argType );
argTypes[i] = TypeLord.getActualType( argTypes[i], inferenceMap, true );
}
}
}
return ((IFunctionType) funcType).inferParameterizedTypeFromArgTypesAndContextType( argTypes, bUseCtx ? getContextType().getType() : null );
}
/**
* Using some simple pattern matching, get a potential property name from a
* method name at the end of an access list.
* <p/>
* Patterns:<br>
* <code>get</code>&lt;mixed-case-name&gt;<br>
* <code>is</code>&lt;mixed-case-name&gt;
*/
private static final String[] METHOD_PREFIX_LIST = {"get", "is"};
private String getPropertyNameFromMethodName( String strMethod )
{
if( strMethod == null || strMethod.length() == 0 )
{
return null;
}
for( String strPrefix : METHOD_PREFIX_LIST )
{
String strProperty = getPropertyNameFromMethodName( strPrefix, strMethod );
if( strProperty != null )
{
return strProperty;
}
}
return null;
}
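// For illustration, assuming the prefix list above:
//   getPropertyNameFromMethodName( "getName" ) returns "Name",
//   getPropertyNameFromMethodName( "isValid" ) returns "Valid",
//   getPropertyNameFromMethodName( "name" ) returns null because no recognized prefix is present.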
private static final String[] METHOD_PREFIX_LIST_WITH_SETTER = {"get", "is", "set" };
private String getPropertyNameFromMethodNameIncludingSetter( String strMethod )
{
if( strMethod == null || strMethod.length() == 0 )
{
return null;
}
for( String strPrefix : METHOD_PREFIX_LIST_WITH_SETTER )
{
String strProperty = getPropertyNameFromMethodName( strPrefix, strMethod );
if( strProperty != null )
{
return strProperty;
}
}
return null;
}
private String getPropertyNameFromMethodName( String strPrefix, String strMethod )
{
int iPropertyOffset = strPrefix.length();
if( strMethod.startsWith( strPrefix ) && strMethod.length() > iPropertyOffset )
{
return strMethod.substring( iPropertyOffset );
}
return null;
}
private void verifyPropertyWritable( IType classRoot, String strProperty, boolean bFromObjInitializer ) throws ParseException
{
if( classRoot == null )
{
throw new IllegalArgumentException( "Root class is null|" );
}
if( strProperty == null )
{
throw new IllegalArgumentException( "Bean member path is null!" );
}
IPropertyInfo pi = BeanAccess.getPropertyInfo( classRoot, strProperty, null, null, null );
if( pi != null )
{
if( !BeanAccess.isDescriptorHidden( pi ) )
{
if( !pi.isWritable( getGosuClass() ) )
{
if( bFromObjInitializer || !(isParsingConstructor() && pi instanceof IGosuVarPropertyInfo && pi.isFinal() && !pi.isStatic()) ) {
throw new ParseException( makeFullParserState(), Res.MSG_CLASS_PROPERTY_NOT_WRITABLE, strProperty, TypeSystem.getUnqualifiedClassName( classRoot ));
}
}
return;
}
}
throw new IllegalArgumentException( "No property descriptor found for property, " + strProperty + ", on class, " + TypeSystem.getUnqualifiedClassName( classRoot ) );
}
/**
* Get the constructor type for the given declaring type and argument expressions.
*
* @param classBean The declaring class of the constructor.
* @param eArgs The argument expressions passed to the constructor. Can be null.
* @param listAllMatchingMethods If non-null, collects every constructor type whose parameter count matches the arguments.
* @param parserState The parser state that may be involved in the process of parsing a constructor. Can be null.
* @return A Gosu type for the constructor.
*/
public IConstructorType getConstructorType( IType classBean, Expression[] eArgs, List<IConstructorType> listAllMatchingMethods, ParserBase parserState ) throws ParseException
{
if( classBean == null )
{
throw new ParseException( parserState == null ? null : parserState.makeFullParserState(), Res.MSG_BEAN_CLASS_IS_NULL );
}
if( ErrorType.shouldHandleAsErrorType( classBean ) )
{
return ErrorType.getInstance().getErrorTypeConstructorType( eArgs, listAllMatchingMethods );
}
if( classBean instanceof TypeVariableType )
{
IType[] paramTypes = new IType[eArgs == null ? 0 : eArgs.length];
for( int i = 0; i < paramTypes.length; i++ )
{
paramTypes[i] = eArgs[i].getType();
}
ConstructorType ctorType = new ConstructorType( new DynamicConstructorInfo( classBean.getTypeInfo(), paramTypes ) );
if( listAllMatchingMethods != null )
{
listAllMatchingMethods.add( ctorType );
}
return ctorType;
}
ITypeInfo typeInfo = classBean.getTypeInfo();
if( typeInfo != null )
{
List<? extends IConstructorInfo> constructors;
if( typeInfo instanceof IRelativeTypeInfo )
{
while( classBean instanceof ITypeVariableType )
{
classBean = ((ITypeVariableType)classBean).getBoundingType();
}
constructors = ((IRelativeTypeInfo)typeInfo).getConstructors( classBean );
}
else
{
constructors = typeInfo.getConstructors();
}
for( IConstructorInfo constructor : constructors )
{
if( typeInfo instanceof JavaTypeInfo )
{
if( constructor.isPrivate() )
{
continue;
}
}
IParameterInfo[] paramTypes = constructor.getParameters();
if( eArgs == null || paramTypes.length == eArgs.length )
{
if( listAllMatchingMethods == null )
{
return new ConstructorType( constructor );
}
listAllMatchingMethods.add( new ConstructorType( constructor ) );
}
}
if( listAllMatchingMethods != null && listAllMatchingMethods.size() > 0 )
{
return listAllMatchingMethods.get( 0 );
}
}
throw new NoCtorFoundException( parserState == null ? null : parserState.makeFullParserState(), TypeSystem.getUnqualifiedClassName( classBean ), eArgs == null ? 0 : eArgs.length );
}
private void verifyCase( ParsedElement element, String foundName, String actualName, ResourceKey errorKey,
boolean isEndsWithMatchOK )
{
verifyCase( element, foundName, actualName, null, errorKey, isEndsWithMatchOK );
}
private void verifyCase( ParsedElement element, String foundName, String actualName, IParserState state, ResourceKey errorKey, boolean isEndsWithMatchOK )
{
if( _bWarnOnCaseIssue )
{
if( isEndsWithMatchOK )
{
if( !actualName.endsWith( foundName ) )
{
if( actualName.toUpperCase().endsWith( foundName.toUpperCase() ) )
{
CharSequence correctedName = actualName.subSequence( actualName.length() - foundName.length(), actualName.length() );
if(state == null) {
warn( element, false, errorKey, foundName, correctedName );
} else {
warn( element, false, state, errorKey, foundName, correctedName );
}
}
}
}
else if( !GosuObjectUtil.equals( foundName, actualName ) )
{
if( actualName.toUpperCase().equals( foundName.toUpperCase() ) )
{
if(state == null) {
warn( element, false, errorKey, foundName, actualName );
} else {
warn( element, false, state, errorKey, foundName, actualName );
}
}
}
}
}
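// For illustration (hypothetical names, case warnings enabled): foundName "string" and actualName
// "String" differ only in case, so a case-mismatch warning is reported with "string" and the
// correctly cased "String". With isEndsWithMatchOK and actualName "java.lang.String", the warning
// carries the trailing "String" segment as the corrected name.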
public void setWarnOnCaseIssue( boolean warnOnCaseIssue )
{
_bWarnOnCaseIssue = warnOnCaseIssue;
}
public void setEditorParser(boolean bStudioEditorParser) {
_bStudioEditorParser = bStudioEditorParser;
}
public boolean isEditorParser() {
if (getOwner() != this) {
return getOwner().isEditorParser();
}
return _bStudioEditorParser;
}
public IParserState getState() {
return makeFullParserState();
}
public boolean isParsingAnnotation()
{
return _parsingAnnotation;
}
public void setParsingAnnotation( boolean parsingAnnotation )
{
_parsingAnnotation = parsingAnnotation;
}
public boolean isAllowingWildcards()
{
return _allowWildcards;
}
public void setAllowWildcards( boolean allowWildcards )
{
_allowWildcards = allowWildcards;
}
public boolean isIgnoreTypeDeprecation()
{
return _ignoreTypeDeprecation > 0;
}
public void pushIgnoreTypeDeprecation()
{
_ignoreTypeDeprecation++;
}
public void popIgnoreTypeDeprecation()
{
if( _ignoreTypeDeprecation == 0 )
{
throw new IllegalStateException( "Unbalanced calls to push/popIgnoreTypeDeprecation()" );
}
_ignoreTypeDeprecation--;
}
public void setLocationsFromProgramClassParser( List<ParseTree> savedLocations )
{
_savedLocations = savedLocations;
}
boolean maybeAdvanceTokenizerToEndOfSavedLocation()
{
if( _savedLocations == null )
{
return false;
}
for( ParseTree pt : _savedLocations )
{
Token T = getTokenizer().getCurrentToken();
if( T.getTokenStart() >= pt.getOffset() && T.getTokenEnd() <= pt.getExtent() )
{
try
{
getTokenizer().goToPosition( pt.getOffset() + pt.getLength() );
return true;
}
catch( IOException e )
{
// Eof ok
return true;
}
}
}
return false;
}
protected void pushTypeVariableTypesToInfer( IInvocableType functionType )
{
if( functionType != null )
{
List<IType> typeVariableTypes = new ArrayList<>();
if( functionType.isGenericType() )
{
IGenericTypeVariable[] typeVariables = functionType.getGenericTypeVariables();
addTypeVarsToList( typeVariableTypes, typeVariables );
}
else if( functionType instanceof ConstructorType )
{
IType declaringType = ((ConstructorType)functionType).getDeclaringType();
if( declaringType.isGenericType() && !declaringType.isParameterizedType() )
{
IGenericTypeVariable[] typeVariables = declaringType.getGenericTypeVariables();
addTypeVarsToList( typeVariableTypes, typeVariables );
}
}
pushInferringFunctionTypeVars( typeVariableTypes );
}
}
private void addTypeVarsToList( List<IType> typeVariableTypes, IGenericTypeVariable[] typeVariables )
{
for( IGenericTypeVariable typeVariable : typeVariables )
{
ITypeVariableDefinition typeVariableDefinition = typeVariable.getTypeVariableDefinition();
if( typeVariableDefinition != null && typeVariableDefinition.getType() != null )
{
typeVariableTypes.add( typeVariableDefinition.getType() );
}
}
}
@Override
public String toString()
{
return "Parsing: " + getScriptPart();
}
private static class GosuParserTransparentActivationContext extends TransparentActivationContext
{
public GosuParserTransparentActivationContext( IScriptPartId scriptPart )
{
super( scriptPart );
}
public String getLabel()
{
return getContext().toString();
}
}
private static class FunctionDeclTransparentActivationContext extends TransparentActivationContext
{
public FunctionDeclTransparentActivationContext( IScriptPartId scriptPart )
{
super( scriptPart );
}
public String getLabel()
{
return "parseFunctionDecl";
}
}
}
|
package org.jboss.remoting3.test;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import org.jboss.remoting3.Channel;
import org.jboss.remoting3.MessageCancelledException;
import org.jboss.remoting3.MessageInputStream;
import org.jboss.remoting3.MessageOutputStream;
import org.testng.annotations.Test;
import org.xnio.IoUtils;
import static org.testng.Assert.*;
/**
* @author <a href="mailto:david.lloyd@redhat.com">David M. Lloyd</a>
*/
public abstract class ChannelTestBase {
private static final int TEST_FILE_LENGTH = 20480;
protected Channel sendChannel;
protected Channel recvChannel;
@Test
public void testEmptyMessage() throws IOException, InterruptedException {
final AtomicBoolean wasEmpty = new AtomicBoolean();
final AtomicReference<IOException> exRef = new AtomicReference<IOException>();
final CountDownLatch latch = new CountDownLatch(1);
recvChannel.receiveMessage(new Channel.Receiver() {
public void handleError(final Channel channel, final IOException error) {
error.printStackTrace();
exRef.set(error);
latch.countDown();
}
public void handleEnd(final Channel channel) {
System.out.println("End of channel");
latch.countDown();
}
public void handleMessage(final Channel channel, final MessageInputStream message) {
System.out.println("Message received");
try {
if (message.read() == -1) {
wasEmpty.set(true);
}
message.close();
} catch (IOException e) {
exRef.set(e);
} finally {
IoUtils.safeClose(message);
latch.countDown();
}
}
});
MessageOutputStream messageOutputStream = sendChannel.writeMessage();
messageOutputStream.close();
latch.await();
IOException exception = exRef.get();
if (exception != null) {
throw exception;
}
assertTrue(wasEmpty.get());
}
@Test
public void testLotsOfContent() throws IOException, InterruptedException {
final AtomicBoolean wasOk = new AtomicBoolean();
final AtomicReference<IOException> exRef = new AtomicReference<IOException>();
final CountDownLatch latch = new CountDownLatch(1);
InputStream stream = ChannelTestBase.class.getResourceAsStream("/test-content.bin");
assertNotNull(stream);
final byte[] data;
try {
data = new byte[TEST_FILE_LENGTH];
int c = 0;
do {
int r = stream.read(data);
if (r == -1) {
break;
}
c += r;
} while (c < TEST_FILE_LENGTH);
stream.close();
} finally {
IoUtils.safeClose(stream);
}
recvChannel.receiveMessage(new Channel.Receiver() {
public void handleError(final Channel channel, final IOException error) {
error.printStackTrace();
exRef.set(error);
latch.countDown();
}
public void handleEnd(final Channel channel) {
System.out.println("End of channel");
latch.countDown();
}
public void handleMessage(final Channel channel, final MessageInputStream message) {
try {
System.out.println("Message received");
final byte[] received = new byte[TEST_FILE_LENGTH];
int c = 0;
do {
int r = message.read(received, c, TEST_FILE_LENGTH - c);
if (r == -1) {
break;
}
c += r;
} while (c < TEST_FILE_LENGTH);
message.close();
assertEquals(data, received);
wasOk.set(true);
} catch (IOException e) {
exRef.set(e);
} finally {
IoUtils.safeClose(message);
latch.countDown();
}
}
});
MessageOutputStream messageOutputStream = sendChannel.writeMessage();
messageOutputStream.write(data);
messageOutputStream.close();
latch.await();
IOException exception = exRef.get();
if (exception != null) {
throw exception;
}
assertTrue(wasOk.get());
}
@Test
public void testWriteCancel() throws IOException, InterruptedException {
final AtomicBoolean wasOk = new AtomicBoolean();
final AtomicReference<IOException> exRef = new AtomicReference<IOException>();
final CountDownLatch latch = new CountDownLatch(1);
InputStream stream = ChannelTestBase.class.getResourceAsStream("/test-content.bin");
assertNotNull(stream);
final byte[] data;
try {
data = new byte[TEST_FILE_LENGTH];
int c = 0;
do {
int r = stream.read(data);
if (r == -1) {
break;
}
c += r;
} while (c < TEST_FILE_LENGTH);
stream.close();
} finally {
IoUtils.safeClose(stream);
}
recvChannel.receiveMessage(new Channel.Receiver() {
public void handleError(final Channel channel, final IOException error) {
error.printStackTrace();
exRef.set(error);
latch.countDown();
}
public void handleEnd(final Channel channel) {
System.out.println("End of channel");
latch.countDown();
}
public void handleMessage(final Channel channel, final MessageInputStream message) {
final byte[] received = new byte[TEST_FILE_LENGTH];
try {
System.out.println("Message received");
int c = 0;
do {
int r = message.read(received);
if (r == -1) {
break;
}
c += r;
} while (c < TEST_FILE_LENGTH);
message.close();
} catch (MessageCancelledException e) {
wasOk.set(Arrays.equals(data, received));
} catch (IOException e) {
exRef.set(e);
} finally {
IoUtils.safeClose(message);
latch.countDown();
}
}
});
MessageOutputStream messageOutputStream = sendChannel.writeMessage();
messageOutputStream.write(data);
messageOutputStream.cancel();
messageOutputStream.close();
latch.await();
IOException exception = exRef.get();
if (exception != null) {
throw exception;
}
assertTrue(wasOk.get());
}
@Test
public void testSimpleWriteMethod() throws Exception {
Byte[] bytes = new Byte[] {1, 2, 3};
MessageOutputStream out = sendChannel.writeMessage();
for (int i = 0 ; i < bytes.length ; i++) {
out.write(bytes[i]);
}
out.close();
final CountDownLatch latch = new CountDownLatch(1);
final ArrayList<Byte> result = new ArrayList<Byte>();
final AtomicReference<IOException> exRef = new AtomicReference<IOException>();
recvChannel.receiveMessage(new Channel.Receiver() {
public void handleError(final Channel channel, final IOException error) {
error.printStackTrace();
latch.countDown();
}
public void handleEnd(final Channel channel) {
System.out.println("End of channel");
latch.countDown();
}
public void handleMessage(final Channel channel, final MessageInputStream message) {
System.out.println("Message received");
try {
int i = message.read();
while (i != -1) {
result.add((byte)i);
i = message.read();
}
message.close();
} catch (IOException e) {
exRef.set(e);
} finally {
IoUtils.safeClose(message);
latch.countDown();
}
}
});
latch.await();
assertNull(exRef.get());
Byte[] resultBytes = result.toArray(new Byte[result.size()]);
assertEquals(bytes, resultBytes);
}
}
|
package GUI;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import javax.swing.JButton;
import javax.swing.JFileChooser;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JMenu;
import javax.swing.JMenuBar;
import javax.swing.JTextArea;
import ProgramControl.MainController;
/*
* DESCRIPTION AND JAVADOCS TODO
*/
@SuppressWarnings("serial")
public class MainMenu extends JFrame implements ActionListener {
private JMenuBar MenuBar;
private JMenu File, Edit, View, Help;
private JTextArea CurrentDirectory;
private JLabel CurDir;
private JButton Browse, Format, EditTags, Organize, Quit;
public MainMenu() {
this.setLayout(null);
//Declares each element needed for the Main Menu
this.MenuBar = new JMenuBar();
this.File = new JMenu("File");
this.Edit = new JMenu("Edit");
this.View = new JMenu("View");
this.Help = new JMenu("Help");
this.CurrentDirectory = new JTextArea(MainController.CurrentDirectory.toString());
this.CurrentDirectory.setEditable(false);
this.Browse = new JButton("Change...");
this.CurDir = new JLabel("Current Directory:");
this.Format = new JButton("<html><center>Format MP3 FileNames</center></html>");
this.EditTags = new JButton("<html><center>Edit ID3 Tags</center></html>");
this.Organize = new JButton("<html><center>Organize Music Collection</center></html>");
this.Quit = new JButton("Quit");
//Adds the Action Listener to each element that requires one
this.Browse.addActionListener(this);
this.Format.addActionListener(this);
this.EditTags.addActionListener(this);
this.Organize.addActionListener(this);
this.Quit.addActionListener(this);
//Sets the location for each element
this.CurrentDirectory.setBounds(15, 25, 325, 18);
this.Browse.setBounds(350, 20, 90, 25);
this.CurDir.setBounds(15, 5, 325, 15);
this.Format.setBounds(15, 75, 150, 75);
this.EditTags.setBounds(15, 175, 150, 75);
this.Organize.setBounds(200, 75, 150, 75);
this.Quit.setBounds(200, 175, 150, 75);
//Adds each Menu Bar element to the Menu Bar
this.MenuBar.add(this.File);
this.MenuBar.add(this.Edit);
this.MenuBar.add(this.View);
this.MenuBar.add(this.Help);
//Adds each element to the Window
this.add(MenuBar);
this.add(CurrentDirectory);
this.add(Browse);
this.add(CurDir);
this.add(Format);
this.add(EditTags);
this.add(Organize);
this.add(Quit);
//Sets the window title, size, default for closing & sets it to open in the middle of the screen
this.setTitle("MainMenu");
this.setSize(500, 300);
this.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
this.setLocationRelativeTo(null);
}
@Override
public void actionPerformed(ActionEvent event) {
if (event.getSource() == Browse) {
int Location = MainController.FileChooser.showOpenDialog(MainController.ReferenceFrame);
if (Location == JFileChooser.APPROVE_OPTION) {
MainController.CurrentDirectory = MainController.FileChooser.getSelectedFile();
}
this.CurrentDirectory.setText(MainController.CurrentDirectory.toString());
}
else if (event.getSource() == Format) {
MainController.MainMenu.setEnabled(false);
MainController.FFSW = new FileFormatSelectionWindow();
MainController.FFSW.setVisible(true);
}
else if (event.getSource() == EditTags) {
MainController.MainMenu.setEnabled(false);
MainController.TEW = new TagEditWindow();
MainController.TEW.setVisible(true);
}
else if (event.getSource() == Organize) {
MainController.MainMenu.setEnabled(false);
MainController.OSSW = new OrganizationStyleSelectionWindow();
MainController.OSSW.setVisible(true);
}
else if (event.getSource() == Quit) {
System.exit(0);
}
}
}
|
package de.ztube.yuno.screens;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.Screen;
import com.badlogic.gdx.graphics.GL20;
import com.badlogic.gdx.graphics.OrthographicCamera;
import com.badlogic.gdx.graphics.g2d.SpriteBatch;
import com.badlogic.gdx.graphics.glutils.ShaderProgram;
import com.badlogic.gdx.maps.MapLayer;
import com.badlogic.gdx.maps.tiled.TiledMap;
import com.badlogic.gdx.maps.tiled.TiledMapRenderer;
import com.badlogic.gdx.maps.tiled.TiledMapTileLayer;
import com.badlogic.gdx.maps.tiled.renderers.OrthogonalTiledMapRenderer;
import com.badlogic.gdx.math.MathUtils;
import com.badlogic.gdx.utils.IntArray;
import com.badlogic.gdx.utils.viewport.FitViewport;
import com.badlogic.gdx.utils.viewport.Viewport;
import java.util.Calendar;
import java.util.Date;
import java.util.GregorianCalendar;
import de.ztube.yuno.Yuno;
import de.ztube.yuno.entity.player.Player;
import de.ztube.yuno.gui.GUI;
/**The main Game class*/
public class Game implements Screen {
//The SpriteBatch
private SpriteBatch batch;
//The camera
private OrthographicCamera camera;
//The viewport
private Viewport viewport;
//The current map and renderer
private TiledMap map;
private TiledMapRenderer renderer;
//The Player
private Player player;
//Shader
private ShaderProgram shader;
//The GUI
private GUI gui;
private int mapWidth, mapHeight, mapTileWidth, mapTileHeight;
//Indices of the map layers that should be rendered either over or under the player
private IntArray renderOverPlayer = new IntArray();
private IntArray renderUnderPlayer = new IntArray();
public Game() {
}
@Override
public void render(float delta) {
Gdx.gl.glClearColor(0, 0, 0, 1);
Gdx.gl.glBlendFunc(GL20.GL_SRC_ALPHA, GL20.GL_ONE_MINUS_SRC_ALPHA);
Gdx.gl.glClear(GL20.GL_COLOR_BUFFER_BIT);
//Set uniform "u_rand" to a random number for noise shading. TODO: better way?
shader.begin();
shader.setUniformf("u_rand", MathUtils.random(0f, 1f));
shader.end();
//Update the camera position according to the players position
updateCameraPosition();
camera.update();
renderer.setView(camera);
renderer.render(renderUnderPlayer.toArray());
batch.setProjectionMatrix(camera.combined);
batch.begin();
//Draw the Player
player.draw(batch);
batch.end();
renderer.render(renderOverPlayer.toArray());
//Draw the GUI
gui.act();
gui.draw();
}
@Override
public void show() {
batch = new SpriteBatch();
camera = new OrthographicCamera();
viewport = new FitViewport(Yuno.SCREEN_WIDTH, Yuno.SCREEN_HEIGHT, camera);
camera.update();
setMap("maps/main.tmx");
player = new Player(this, map);
gui = new GUI(player);
//Set the InputProcessor to GUI
Gdx.input.setInputProcessor(gui);
ShaderProgram.pedantic = false;
//Initialize the shader
shader = new ShaderProgram(Gdx.files.internal("shaders/passthrough.vsh"), Gdx.files.internal("shaders/vignette.fsh"));
if (!shader.isCompiled())
Gdx.app.error("Yuno", shader.getLog());
batch.setShader(shader);
shader.begin();
shader.setUniformf("u_resolution", Gdx.graphics.getWidth(), Gdx.graphics.getHeight());
Date date = new Date(); // given date
Calendar calendar = GregorianCalendar.getInstance(); // creates a new calendar instance
calendar.setTime(date); // assigns calendar to given date
int hour = calendar.get(Calendar.HOUR_OF_DAY);
//In the night everything looks blueish and noisy
//Day
if (hour <= 18 && hour >= 6) {
shader.setUniformf("u_time", 1.0f);
shader.setUniformf("u_noise", 0.0f);
}
//Night
else {
shader.setUniformf("u_time", 0.3f);
shader.setUniformf("u_noise", 0.4f);
}
shader.end();
Gdx.app.log("Yuno", "loaded Game");
}
private void updateCameraPosition() {
//Camera follows the Player
camera.position.set(player.getX() + player.getWidth() / 2, player.getY() + player.getHeight() / 2, 1);
//Camera stops following the Player if he reaches the end of the map
//OutTop
if (player.getY() + player.getHeight() / 2 > mapTileHeight * mapHeight - camera.viewportHeight / 2) {
camera.position.set(camera.position.x, mapTileHeight * mapHeight - camera.viewportHeight / 2, 1);
}
//OutBottom
else if (player.getY() + player.getHeight() / 2 < camera.viewportHeight / 2) {
camera.position.set(camera.position.x, camera.viewportHeight / 2, 1);
}
//OutLeft
if (player.getX() + player.getWidth() / 2 < camera.viewportWidth / 2) {
camera.position.set(camera.viewportWidth / 2, camera.position.y, 1);
}
//OutRight
else if (player.getX() + player.getWidth() / 2 > mapTileWidth * mapWidth - camera.viewportWidth / 2) {
camera.position.set(mapTileWidth * mapWidth - camera.viewportWidth / 2, camera.position.y, 1);
}
}
//Load the map at the given path, make it the current map, rebuild the layer render order and return it
public TiledMap setMap(String mapPath) {
map = Yuno.assets.get(mapPath, TiledMap.class);
mapWidth = ((TiledMapTileLayer) map.getLayers().get(0)).getWidth();
mapHeight = ((TiledMapTileLayer) map.getLayers().get(0)).getHeight();
mapTileWidth = (int) ((TiledMapTileLayer) map.getLayers().get(0)).getTileWidth();
mapTileHeight = (int) ((TiledMapTileLayer) map.getLayers().get(0)).getTileHeight();
renderer = new OrthogonalTiledMapRenderer(map, batch);
renderer.setView(camera);
renderOverPlayer.clear();
renderUnderPlayer.clear();
for (int i = 0; i < map.getLayers().getCount(); i++) {
MapLayer layer = map.getLayers().get(i);
//Assign each map layer to be rendered either over or under the Player
if (layer.getProperties().containsKey("floating") && (Boolean)layer.getProperties().get("floating"))
renderOverPlayer.add(i);
else
renderUnderPlayer.add(i);
}
return map;
}
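//For illustration: a Tiled layer carrying a custom boolean property "floating" set to true
//(for example a hypothetical treetop layer) ends up in renderOverPlayer and is drawn after the
//Player sprite in render(); all other layers are drawn underneath.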
@Override
public void resize(int width, int height) {
viewport.update(width, height);
shader.begin();
shader.setUniformf("u_resolution", width, height);
shader.end();
gui.getViewport().update(width, height);
}
@Override
public void pause() {
}
@Override
public void resume() {
}
@Override
public void hide() {
dispose();
}
@Override
public void dispose() {
map.dispose();
batch.dispose();
gui.dispose();
shader.dispose();
}
}
|
package io.spine.server.entity.storage;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
import static java.lang.annotation.ElementType.METHOD;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
/**
* An annotation which is used to mark getters for {@linkplain EntityColumn entity columns}.
*
* <p>The properties of the annotation affect how the column is seen by the storage and clients.
*
* <p>The annotation will have effect only if it's applied to a {@code public} instance getter,
* meaning a method without parameters and with {@code get-} prefix. The {@code is-} prefix is
* supported for primitive {@code boolean} or boxed {@code Boolean} columns.
*
* <p>The {@link #name()} property allows specifying a custom column name to be persisted in a {@code Storage}.
*
* <p>If there are repeated column names within an {@code Entity},
* an exception is raised when a repository serving the entity is added to
* its {@code BoundedContext}.
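*
* <p>For illustration only, a hypothetical column getter (the names below are examples, not part of this API):
* <pre>
*   {@literal @}Column(name = "archived")
*   public boolean isArchived() { ... }
* </pre>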
*/
@Target(METHOD)
@Retention(RUNTIME)
public @interface Column {
/**
* The custom {@linkplain EntityColumn#name() name} of the column.
*
* <p>Defaults to the name extracted from the getter; this name is used for querying.
*/
String name() default "";
}
|
package com.intellij.ide.plugins;
import com.intellij.CommonBundle;
import com.intellij.ide.IdeBundle;
import com.intellij.ide.plugins.cl.PluginClassLoader;
import com.intellij.ide.startup.StartupActionScriptManager;
import com.intellij.idea.Main;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.PathManager;
import com.intellij.openapi.application.ex.PathManagerEx;
import com.intellij.openapi.application.impl.PluginsFacade;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.extensions.Extensions;
import com.intellij.openapi.extensions.LogProvider;
import com.intellij.openapi.extensions.PluginId;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.Condition;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.util.ArrayUtil;
import com.intellij.util.Function;
import com.intellij.util.graph.CachingSemiGraph;
import com.intellij.util.graph.DFSTBuilder;
import com.intellij.util.graph.Graph;
import com.intellij.util.graph.GraphGenerator;
import com.intellij.util.lang.UrlClassLoader;
import com.intellij.util.text.StringTokenizer;
import gnu.trove.THashMap;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.Nullable;
import sun.reflect.Reflection;
import javax.swing.*;
import java.io.*;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
import java.net.URLDecoder;
import java.util.*;
import java.util.regex.Pattern;
/**
* @author mike
*/
@SuppressWarnings({"UseOfSystemOutOrSystemErr", "CallToPrintStackTrace"}) // No logger is loaded at this time so we have to use these.
public class PluginManager {
@SuppressWarnings({"NonConstantLogger"}) //Logger is lasy-initialized in order not to use it outside the appClassLoader
private static Logger ourLogger = null;
@NonNls public static final String AREA_IDEA_PROJECT = "IDEA_PROJECT";
@NonNls public static final String AREA_IDEA_MODULE = "IDEA_MODULE";
@NonNls private static final String PROPERTY_IGNORE_CLASSPATH = "ignore.classpath";
@NonNls private static final String PROPERTY_PLUGIN_PATH = "plugin.path";
private static final Object PLUGIN_CLASSES_LOCK = new Object();
private static String myPluginError = null;
@NonNls private static final String CORE_PLUGIN_ID = "com.intellij";
@NonNls public static final String DISABLED_PLUGINS_FILENAME = "disabled_plugins.txt";
private static List<String> ourDisabledPlugins = null;
static final Object lock = new Object();
private static String ourBuildNumber;
@NonNls private static final String PLUGIN_XML = "plugin.xml";
@NonNls private static final String FILE_CACHE = "fileCache";
@NonNls private static final String URL_CACHE = "urlCache";
@NonNls private static final String META_INF = "META-INF";
private static Logger getLogger() {
if (ourLogger == null) {
ourLogger = Logger.getInstance("#com.intellij.ide.plugins.PluginManager");
}
return ourLogger;
}
// These fields are accessed via reflection, so their names must not be changed by the obfuscator.
// Do not make them private, because then they would be scrambled.
@SuppressWarnings({"WeakerAccess"}) static String[] ourArguments;
@SuppressWarnings({"WeakerAccess"}) static String ourMainClass;
@SuppressWarnings({"WeakerAccess"}) static String ourMethodName;
public static void initPluginClasses () {
synchronized(lock) {
File file = new File (getPluginClassesPath());
if (file.exists())
file.delete();
}
}
@NonNls
static String getPluginClassesPath() {
return PathManagerEx.getPluginTempPath() + File.separator + "plugin.classes";
}
private static class Facade extends PluginsFacade {
public IdeaPluginDescriptor getPlugin(PluginId id) {
return PluginManager.getPlugin(id);
}
public IdeaPluginDescriptor[] getPlugins() {
return PluginManager.getPlugins();
}
}
private static IdeaPluginDescriptorImpl[] ourPlugins;
private static Map<String, PluginId> ourPluginClasses;
public static void main(final String[] args, final String mainClass, final String methodName) {
main(args, mainClass, methodName, new ArrayList<URL>());
}
public static void main(final String[] args, final String mainClass, final String methodName, List<URL> classpathElements) {
ourArguments = args;
ourMainClass = mainClass;
ourMethodName = methodName;
final PluginManager pluginManager = new PluginManager();
pluginManager.bootstrap(classpathElements);
}
/**
* Do not call this method during bootstrap; it should be called on the copy of PluginManager loaded by the IdeaClassLoader.
*/
public synchronized static IdeaPluginDescriptor[] getPlugins() {
if (ourPlugins == null) {
initializePlugins();
clearJarURLCache();
}
return ourPlugins;
}
private static void initializePlugins() {
configureExtensions();
final IdeaPluginDescriptorImpl[] pluginDescriptors = loadDescriptors();
final Class callerClass = Reflection.getCallerClass(1);
final ClassLoader parentLoader = callerClass.getClassLoader();
final List<IdeaPluginDescriptorImpl> result = new ArrayList<IdeaPluginDescriptorImpl>();
for (IdeaPluginDescriptorImpl descriptor : pluginDescriptors) {
if (!shouldSkipPlugin(descriptor, pluginDescriptors)) {
result.add(descriptor);
} else {
descriptor.setEnabled(false);
final List<File> classPath = descriptor.getClassPath();
descriptor
.setLoader(createPluginClassLoader(classPath.toArray(new File[classPath.size()]), new ClassLoader[]{parentLoader}, descriptor));
}
}
prepareLoadingPluginsErrorMessage(filterBadPlugins(result));
final Map<PluginId, IdeaPluginDescriptorImpl> idToDescriptorMap = new HashMap<PluginId, IdeaPluginDescriptorImpl>();
for (final IdeaPluginDescriptorImpl descriptor : result) {
idToDescriptorMap.put(descriptor.getPluginId(), descriptor);
}
final IdeaPluginDescriptor corePluginDescriptor = idToDescriptorMap.get(PluginId.getId(CORE_PLUGIN_ID));
assert corePluginDescriptor != null;
for (IdeaPluginDescriptorImpl descriptor : result) {
if (descriptor != corePluginDescriptor) {
descriptor.insertDependency(corePluginDescriptor);
}
}
mergeOptionalConfigs(idToDescriptorMap);
// sort descriptors according to plugin dependencies
Collections.sort(result, getPluginDescriptorComparator(idToDescriptorMap));
for (final IdeaPluginDescriptorImpl pluginDescriptor : result) {
final List<File> classPath = pluginDescriptor.getClassPath();
final PluginId[] dependentPluginIds = pluginDescriptor.getDependentPluginIds();
final ClassLoader[] parentLoaders = getParentLoaders(idToDescriptorMap, dependentPluginIds);
final ClassLoader pluginClassLoader = createPluginClassLoader(classPath.toArray(new File[classPath.size()]),
parentLoaders.length > 0 ? parentLoaders : new ClassLoader[] {parentLoader},
pluginDescriptor);
pluginDescriptor.setLoader(pluginClassLoader);
pluginDescriptor.registerExtensions();
}
ourPlugins = pluginDescriptors;
}
private static void mergeOptionalConfigs(Map<PluginId, IdeaPluginDescriptorImpl> descriptors) {
for (IdeaPluginDescriptorImpl descriptor : descriptors.values()) {
final Map<PluginId, IdeaPluginDescriptorImpl> optionalDescriptors = descriptor.getOptionalDescriptors();
if (optionalDescriptors != null && !optionalDescriptors.isEmpty()) {
for (Map.Entry<PluginId, IdeaPluginDescriptorImpl> entry: optionalDescriptors.entrySet()) {
if (descriptors.containsKey(entry.getKey())) {
descriptor.mergeOptionalConfig(entry.getValue());
}
}
}
}
}
private static void prepareLoadingPluginsErrorMessage(final String errorMessage) {
if (errorMessage != null) {
if (!Main.isHeadless()) {
myPluginError = errorMessage;
} else {
getLogger().error(errorMessage);
}
}
}
private static void configureExtensions() {
Extensions.setLogProvider(new IdeaLogProvider());
Extensions.registerAreaClass(AREA_IDEA_PROJECT, null);
Extensions.registerAreaClass(AREA_IDEA_MODULE, AREA_IDEA_PROJECT);
}
private static boolean shouldLoadPlugins() {
try {
// no plugins during bootstrap
Class.forName("com.intellij.openapi.extensions.Extensions");
}
catch (ClassNotFoundException e) {
return false;
}
//noinspection HardCodedStringLiteral
final String loadPlugins = System.getProperty("idea.load.plugins");
return loadPlugins == null || Boolean.TRUE.toString().equals(loadPlugins);
}
public static boolean shouldSkipPlugin(final IdeaPluginDescriptor descriptor) {
return shouldSkipPlugin(descriptor, ourPlugins);
}
private static boolean shouldSkipPlugin(final IdeaPluginDescriptor descriptor, IdeaPluginDescriptor[] loaded) {
final String idString = descriptor.getPluginId().getIdString();
if (idString.equals(CORE_PLUGIN_ID)) {
return false;
}
//noinspection HardCodedStringLiteral
final String pluginId = System.getProperty("idea.load.plugins.id");
if (pluginId == null) {
if (descriptor instanceof IdeaPluginDescriptorImpl && !((IdeaPluginDescriptorImpl)descriptor).isEnabled()) return true;
if (!shouldLoadPlugins()) return true;
}
boolean shouldLoad;
//noinspection HardCodedStringLiteral
final String loadPluginCategory = System.getProperty("idea.load.plugins.category");
if (loadPluginCategory != null) {
shouldLoad = loadPluginCategory.equals(descriptor.getCategory());
}
else {
if (pluginId != null) {
shouldLoad = pluginId.equals(idString);
if (!shouldLoad) {
for (IdeaPluginDescriptor plugin : loaded) {
if (plugin.getPluginId().getIdString().equals(pluginId)) {
for (PluginId id: plugin.getDependentPluginIds()) {
if (id.equals(descriptor.getPluginId())) {
shouldLoad = true;
break;
}
}
break;
}
}
}
} else {
shouldLoad = !getDisabledPlugins().contains(idString);
}
if (shouldLoad && descriptor instanceof IdeaPluginDescriptorImpl) {
if (isIncompatible(descriptor)) return true;
}
}
return !shouldLoad;
}
public static boolean isIncompatible(final IdeaPluginDescriptor descriptor) {
final String buildNumber = getBuildNumber();
if (buildNumber != null) {
final String sinceBuild = ((IdeaPluginDescriptorImpl)descriptor).getSinceBuild();
try {
Integer.parseInt(sinceBuild);
if (sinceBuild.compareToIgnoreCase(buildNumber) > 0) {
return true;
}
}
catch (NumberFormatException e) {
//skip invalid numbers
}
final String untilBuild = ((IdeaPluginDescriptorImpl)descriptor).getUntilBuild();
try {
Integer.parseInt(untilBuild);
if (untilBuild.compareToIgnoreCase(buildNumber) < 0) {
return true;
}
}
catch (NumberFormatException e) {
//skip invalid numbers
}
}
return false;
}
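// For illustration (hypothetical build numbers): with buildNumber "4500", a descriptor whose
// since-build is "5000" is reported incompatible because "5000".compareToIgnoreCase("4500") > 0;
// likewise an until-build of "4000" is incompatible because it compares lower than "4500".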
private static Comparator<IdeaPluginDescriptor> getPluginDescriptorComparator(Map<PluginId, IdeaPluginDescriptorImpl> idToDescriptorMap) {
final Graph<PluginId> graph = createPluginIdGraph(idToDescriptorMap);
final DFSTBuilder<PluginId> builder = new DFSTBuilder<PluginId>(graph);
/*
if (!builder.isAcyclic()) {
final Pair<String,String> circularDependency = builder.getCircularDependency();
throw new Exception("Cyclic dependencies between plugins are not allowed: \"" + circularDependency.getFirst() + "\" and \"" + circularDependency.getSecond() + "");
}
*/
final Comparator<PluginId> idComparator = builder.comparator();
return new Comparator<IdeaPluginDescriptor>() {
public int compare(IdeaPluginDescriptor o1, IdeaPluginDescriptor o2) {
return idComparator.compare(o1.getPluginId(), o2.getPluginId());
}
};
}
private static Graph<PluginId> createPluginIdGraph(final Map<PluginId, IdeaPluginDescriptorImpl> idToDescriptorMap) {
final PluginId[] ids = idToDescriptorMap.keySet().toArray(new PluginId[idToDescriptorMap.size()]);
return GraphGenerator.create(CachingSemiGraph.create(new GraphGenerator.SemiGraph<PluginId>() {
public Collection<PluginId> getNodes() {
return Arrays.asList(ids);
}
public Iterator<PluginId> getIn(PluginId pluginId) {
final IdeaPluginDescriptor descriptor = idToDescriptorMap.get(pluginId);
ArrayList<PluginId> plugins = new ArrayList<PluginId>();
for(PluginId dependentPluginId: descriptor.getDependentPluginIds()) {
// check for missing optional dependency
if (idToDescriptorMap.containsKey(dependentPluginId)) {
plugins.add(dependentPluginId);
}
}
return plugins.iterator();
}
}));
}
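// For illustration (hypothetical plugin ids): if plugin "B" declares a dependency on plugin "A",
// getIn("B") yields "A"; the comparator built from this graph is then used to sort descriptors so
// that dependencies such as "A" come before the plugins that depend on them.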
private static ClassLoader[] getParentLoaders(Map<PluginId, IdeaPluginDescriptorImpl> idToDescriptorMap, PluginId[] pluginIds) {
if (ApplicationManager.getApplication().isUnitTestMode()) return new ClassLoader[0];
final List<ClassLoader> classLoaders = new ArrayList<ClassLoader>();
for (final PluginId id : pluginIds) {
IdeaPluginDescriptor pluginDescriptor = idToDescriptorMap.get(id);
if (pluginDescriptor == null) {
continue; // Might be an optional dependency
}
final ClassLoader loader = pluginDescriptor.getPluginClassLoader();
if (loader == null) {
getLogger().assertTrue(false, "Plugin class loader should be initialized for plugin " + id);
}
classLoaders.add(loader);
}
return classLoaders.toArray(new ClassLoader[classLoaders.size()]);
}
private static void clearJarURLCache() {
try {
/*
new URLConnection(null) {
public void connect() throws IOException {
throw new UnsupportedOperationException();
}
}.setDefaultUseCaches(false);
*/
Class jarFileFactory = Class.forName("sun.net.www.protocol.jar.JarFileFactory");
Field fileCache = jarFileFactory.getDeclaredField(FILE_CACHE);
Field urlCache = jarFileFactory.getDeclaredField(URL_CACHE);
fileCache.setAccessible(true);
fileCache.set(null, new HashMap());
urlCache.setAccessible(true);
urlCache.set(null, new HashMap());
}
catch (Exception e) {
System.out.println("Failed to clear URL cache");
// Do nothing.
}
}
/**
* Called via reflection
*/
@SuppressWarnings({"UnusedDeclaration"})
protected static void start() {
try {
//noinspection HardCodedStringLiteral
ThreadGroup threadGroup = new ThreadGroup("Idea Thread Group") {
public void uncaughtException(Thread t, Throwable e) {
getLogger().error(e);
}
};
Runnable runnable = new Runnable() {
public void run() {
try {
clearJarURLCache();
//noinspection AssignmentToStaticFieldFromInstanceMethod
PluginsFacade.INSTANCE = new Facade();
Class aClass = Class.forName(ourMainClass);
final Method method = aClass.getDeclaredMethod(ourMethodName, (ArrayUtil.EMPTY_STRING_ARRAY).getClass());
method.setAccessible(true);
//noinspection RedundantArrayCreation
method.invoke(null, new Object[]{ourArguments});
}
catch (Exception e) {
e.printStackTrace();
getLogger().error("Error while accessing " + ourMainClass + "." + ourMethodName + " with arguments: " + Arrays.asList(ourArguments), e);
}
}
};
//noinspection HardCodedStringLiteral
new Thread(threadGroup, runnable, "Idea Main Thread").start();
}
catch (Exception e) {
getLogger().error(e);
}
}
private static IdeaPluginDescriptorImpl[] loadDescriptors() {
if (isLoadingOfExternalPluginsDisabled()) {
return IdeaPluginDescriptorImpl.EMPTY_ARRAY;
}
final List<IdeaPluginDescriptorImpl> result = new ArrayList<IdeaPluginDescriptorImpl>();
loadDescriptors(PathManager.getPluginsPath(), result);
loadDescriptors(PathManager.getPreinstalledPluginsPath(), result);
loadDescriptorsFromProperty(result);
loadDescriptorsFromClassPath(result);
IdeaPluginDescriptorImpl[] pluginDescriptors = result.toArray(new IdeaPluginDescriptorImpl[result.size()]);
try {
Arrays.sort(pluginDescriptors, new PluginDescriptorComparator(pluginDescriptors));
}
catch (Exception e) {
prepareLoadingPluginsErrorMessage(IdeBundle.message("error.plugins.were.not.loaded", e.getMessage()));
getLogger().info(e);
pluginDescriptors = IdeaPluginDescriptorImpl.EMPTY_ARRAY;
}
return pluginDescriptors;
}
public static synchronized void reportPluginError() {
if (myPluginError != null) {
JOptionPane.showMessageDialog(null, myPluginError, IdeBundle.message("title.plugin.error"), JOptionPane.ERROR_MESSAGE);
myPluginError = null;
}
}
private static void loadDescriptorsFromProperty(final List<IdeaPluginDescriptorImpl> result) {
final String pathProperty = System.getProperty(PROPERTY_PLUGIN_PATH);
if (pathProperty == null) return;
for (java.util.StringTokenizer t = new java.util.StringTokenizer(pathProperty, File.pathSeparator); t.hasMoreTokens();) {
String s = t.nextToken();
final IdeaPluginDescriptorImpl ideaPluginDescriptor = loadDescriptor(new File(s), PLUGIN_XML);
if (ideaPluginDescriptor != null) {
result.add(ideaPluginDescriptor);
}
}
}
@SuppressWarnings({"UseOfSystemOutOrSystemErr", "CallToPrintStackTrace"})
private static void loadDescriptorsFromClassPath(final List<IdeaPluginDescriptorImpl> result) {
try {
final Collection<URL> urls = getClassLoaderUrls();
for (URL url : urls) {
final String protocol = url.getProtocol();
if ("file".equals(protocol)) {
final File file = new File(URLDecoder.decode(url.getFile()));
//final String canonicalPath = file.getCanonicalPath();
//if (!canonicalPath.startsWith(homePath) || canonicalPath.endsWith(".jar")) continue;
//if (!canonicalPath.startsWith(homePath)) continue;
final IdeaPluginDescriptorImpl pluginDescriptor = loadDescriptor(file, PLUGIN_XML);
if (pluginDescriptor != null && !result.contains(pluginDescriptor)) {
result.add(pluginDescriptor);
}
}
}
}
catch (Exception e) {
System.err.println("Error loading plugins from classpath:");
e.printStackTrace();
}
}
private static Collection<URL> getClassLoaderUrls() {
final ClassLoader classLoader = PluginManager.class.getClassLoader();
final Class<? extends ClassLoader> aClass = classLoader.getClass();
if (aClass.getName().equals(UrlClassLoader.class.getName())) {
try {
return (List<URL>)aClass.getDeclaredMethod("getUrls").invoke(classLoader);
}
catch (IllegalAccessException e) {
// ignore and fall back to the URLClassLoader handling below
}
catch (InvocationTargetException e) {
// ignore and fall back to the URLClassLoader handling below
}
catch (NoSuchMethodException e) {
// ignore and fall back to the URLClassLoader handling below
}
}
if (classLoader instanceof URLClassLoader) {
return Arrays.asList(((URLClassLoader)classLoader).getURLs());
}
return Collections.emptyList();
}
@Nullable
private static String filterBadPlugins(List<IdeaPluginDescriptorImpl> result) {
final Map<PluginId, IdeaPluginDescriptorImpl> idToDescriptorMap = new HashMap<PluginId, IdeaPluginDescriptorImpl>();
final StringBuffer message = new StringBuffer();
boolean pluginsWithoutIdFound = false;
for (Iterator<IdeaPluginDescriptorImpl> it = result.iterator(); it.hasNext();) {
final IdeaPluginDescriptorImpl descriptor = it.next();
final PluginId id = descriptor.getPluginId();
if (id == null) {
pluginsWithoutIdFound = true;
}
if (idToDescriptorMap.containsKey(id)) {
if (message.length() > 0) {
message.append("\n");
}
message.append(IdeBundle.message("message.duplicate.plugin.id"));
message.append(id);
it.remove();
}
else if (descriptor.isEnabled()) {
idToDescriptorMap.put(id, descriptor);
}
}
final List<String> disabledPluginIds = new ArrayList<String>();
for (final Iterator<IdeaPluginDescriptorImpl> it = result.iterator(); it.hasNext();) {
final IdeaPluginDescriptorImpl pluginDescriptor = it.next();
checkDependants(pluginDescriptor, new Function<PluginId, IdeaPluginDescriptor>() {
public IdeaPluginDescriptor fun(final PluginId pluginId) {
return idToDescriptorMap.get(pluginId);
}
}, new Condition<PluginId>() {
public boolean value(final PluginId pluginId) {
if (!idToDescriptorMap.containsKey(pluginId)) {
if (message.length() > 0) {
message.append("\n");
}
pluginDescriptor.setEnabled(false);
disabledPluginIds.add(pluginDescriptor.getPluginId().getIdString());
message.append(getDisabledPlugins().contains(pluginId.getIdString())
? IdeBundle.message("error.required.plugin.disabled", pluginDescriptor.getPluginId(), pluginId)
: IdeBundle.message("error.required.plugin.not.installed", pluginDescriptor.getPluginId(), pluginId));
it.remove();
return false;
}
return true;
}
});
}
if (!disabledPluginIds.isEmpty()) {
try {
saveDisabledPlugins(disabledPluginIds, true);
}
catch (IOException e) {
getLogger().error(e);
}
}
if (pluginsWithoutIdFound) {
if (message.length() > 0) {
message.append("\n");
}
message.append(IdeBundle.message("error.plugins.without.id.found"));
}
if (message.length() > 0) {
message.insert(0, IdeBundle.message("error.problems.found.loading.plugins"));
return message.toString();
}
return null;
}
public static void checkDependants(final IdeaPluginDescriptor pluginDescriptor,
final Function<PluginId, IdeaPluginDescriptor> pluginId2Descriptor,
final Condition<PluginId> check) {
checkDependants(pluginDescriptor, pluginId2Descriptor, check, new HashSet<PluginId>());
}
private static boolean checkDependants(final IdeaPluginDescriptor pluginDescriptor,
final Function<PluginId, IdeaPluginDescriptor> pluginId2Descriptor,
final Condition<PluginId> check,
final Set<PluginId> processed) {
processed.add(pluginDescriptor.getPluginId());
final PluginId[] dependentPluginIds = pluginDescriptor.getDependentPluginIds();
final Set<PluginId> optionalDependencies = new HashSet<PluginId>(Arrays.asList(pluginDescriptor.getOptionalDependentPluginIds()));
for (final PluginId dependentPluginId : dependentPluginIds) {
if (processed.contains(dependentPluginId)) continue;
if (!optionalDependencies.contains(dependentPluginId)) {
if (!check.value(dependentPluginId)) {
return false;
}
final IdeaPluginDescriptor dependantPluginDescriptor = pluginId2Descriptor.fun(dependentPluginId);
if (dependantPluginDescriptor != null && !checkDependants(dependantPluginDescriptor, pluginId2Descriptor, check, processed)) {
return false;
}
}
}
return true;
}
@Nullable
private static String getBuildNumber() {
if (ourBuildNumber == null) {
try {
ourBuildNumber = new String(FileUtil.loadFileText(new File(PathManager.getHomePath() + "/build.txt"))).trim();
}
catch (IOException e) {
ourBuildNumber = null;
}
}
return ourBuildNumber;
}
private static void loadDescriptors(String pluginsPath, List<IdeaPluginDescriptorImpl> result) {
final File pluginsHome = new File(pluginsPath);
final File[] files = pluginsHome.listFiles();
if (files != null) {
for (File file : files) {
final IdeaPluginDescriptorImpl descriptor = loadDescriptor(file, PLUGIN_XML);
if (descriptor != null && !result.contains(descriptor)) {
result.add(descriptor);
}
}
}
}
@SuppressWarnings({"HardCodedStringLiteral"})
@Nullable
private static IdeaPluginDescriptorImpl loadDescriptor(final File file, final @NonNls String fileName) {
IdeaPluginDescriptorImpl descriptor = null;
if (file.isDirectory()) {
descriptor = loadDescriptorFromDir(file, fileName);
if (descriptor == null) {
File libDir = new File(file, "lib");
if (!libDir.isDirectory()) {
return null;
}
final File[] files = libDir.listFiles();
if (files == null || files.length == 0) {
return null;
}
for (final File f : files) {
if (isJarOrZip(f)) {
IdeaPluginDescriptorImpl descriptor1 = loadDescriptorFromJar(f, fileName);
if (descriptor1 != null) {
if (descriptor != null) {
getLogger().info("Cannot load " + file + " because two or more plugin.xml's detected");
return null;
}
descriptor = descriptor1;
descriptor.setPath(file);
}
}
else if (f.isDirectory()) {
IdeaPluginDescriptorImpl descriptor1 = loadDescriptorFromDir(f, fileName);
if (descriptor1 != null) {
if (descriptor != null) {
getLogger().info("Cannot load " + file + " because two or more plugin.xml's detected");
return null;
}
descriptor = descriptor1;
descriptor.setPath(file);
}
}
}
}
}
else if (StringUtil.endsWithIgnoreCase(file.getName(), ".jar")) {
descriptor = loadDescriptorFromJar(file, fileName);
}
if (descriptor != null && !descriptor.getOptionalConfigs().isEmpty()) {
final Map<PluginId, IdeaPluginDescriptorImpl> descriptors = new HashMap<PluginId, IdeaPluginDescriptorImpl>(descriptor.getOptionalConfigs().size());
for (Map.Entry<PluginId, String> entry: descriptor.getOptionalConfigs().entrySet()) {
final IdeaPluginDescriptorImpl optionalDescriptor = loadDescriptor(file, entry.getValue());
if (optionalDescriptor != null) {
descriptors.put(entry.getKey(), optionalDescriptor);
}
}
descriptor.setOptionalDescriptors(descriptors);
}
return descriptor;
}
@Nullable
private static IdeaPluginDescriptorImpl loadDescriptorFromDir(final File file, @NonNls String fileName) {
IdeaPluginDescriptorImpl descriptor = null;
File descriptorFile = new File(file, META_INF + File.separator + fileName);
if (descriptorFile.exists()) {
descriptor = new IdeaPluginDescriptorImpl(file);
try {
descriptor.readExternal(descriptorFile.toURL());
}
catch (Exception e) {
System.err.println("Cannot load: " + descriptorFile.getAbsolutePath());
e.printStackTrace();
}
}
return descriptor;
}
@Nullable
private static IdeaPluginDescriptorImpl loadDescriptorFromJar(File file, @NonNls String fileName) {
try {
IdeaPluginDescriptorImpl descriptor = new IdeaPluginDescriptorImpl(file);
URL fileURL = file.toURL();
URL jarURL = new URL(
"jar:" + fileURL.toExternalForm() + "!/META-INF/" + fileName
);
descriptor.readExternal(jarURL);
return descriptor;
}
catch (FileNotFoundException e) {
return null;
}
catch (Exception e) {
getLogger().info("Cannot load " + file, e);
}
return null;
}
@SuppressWarnings({"HardCodedStringLiteral"})
protected void bootstrap(List<URL> classpathElements) {
UrlClassLoader newClassLoader = initClassloader(classpathElements);
try {
final Class mainClass = Class.forName(getClass().getName(), true, newClassLoader);
Field field = mainClass.getDeclaredField("ourMainClass");
field.setAccessible(true);
field.set(null, ourMainClass);
field = mainClass.getDeclaredField("ourMethodName");
field.setAccessible(true);
field.set(null, ourMethodName);
field = mainClass.getDeclaredField("ourArguments");
field.setAccessible(true);
field.set(null, ourArguments);
final Method startMethod = mainClass.getDeclaredMethod("start");
startMethod.setAccessible(true);
startMethod.invoke(null, ArrayUtil.EMPTY_OBJECT_ARRAY);
}
catch (Exception e) {
Logger logger = getLogger();
if (logger == null) {
e.printStackTrace(System.err);
}
else {
logger.error(e);
}
}
}
public UrlClassLoader initClassloader(final List<URL> classpathElements) {
PathManager.loadProperties();
try {
addParentClasspath(classpathElements);
addIDEALibraries(classpathElements);
addAdditionalClassPath(classpathElements);
}
catch (IllegalArgumentException e) {
if (Main.isHeadless()) {
getLogger().error(e);
} else {
JOptionPane
.showMessageDialog(JOptionPane.getRootFrame(), e.getMessage(), CommonBundle.getErrorTitle(), JOptionPane.INFORMATION_MESSAGE);
}
System.exit(1);
}
catch (MalformedURLException e) {
if (Main.isHeadless()) {
getLogger().error(e.getMessage());
} else {
JOptionPane
.showMessageDialog(JOptionPane.getRootFrame(), e.getMessage(), CommonBundle.getErrorTitle(), JOptionPane.INFORMATION_MESSAGE);
}
System.exit(1);
}
filterClassPath(classpathElements);
UrlClassLoader newClassLoader = null;
try {
newClassLoader = new UrlClassLoader(classpathElements, null, true, true);
// prepare plugins
if (!isLoadingOfExternalPluginsDisabled()) {
initPluginClasses();
try {
StartupActionScriptManager.executeActionScript();
}
catch (IOException e) {
final String errorMessage = "Error executing plugin installation script: " + e.getMessage();
if (Main.isHeadless()) {
System.out.println(errorMessage);
} else {
JOptionPane
.showMessageDialog(JOptionPane.getRootFrame(), errorMessage, CommonBundle.getErrorTitle(), JOptionPane.INFORMATION_MESSAGE);
}
}
}
Thread.currentThread().setContextClassLoader(newClassLoader);
}
catch (Exception e) {
Logger logger = getLogger();
if (logger == null) {
e.printStackTrace(System.err);
}
else {
logger.error(e);
}
}
return newClassLoader;
}
private static void filterClassPath(final List<URL> classpathElements) {
final String ignoreProperty = System.getProperty(PROPERTY_IGNORE_CLASSPATH);
if (ignoreProperty == null) return;
final Pattern pattern = Pattern.compile(ignoreProperty);
for (Iterator<URL> i = classpathElements.iterator(); i.hasNext();) {
URL url = i.next();
final String u = url.toExternalForm();
if (pattern.matcher(u).matches()) {
i.remove();
}
}
}
@Nullable
private static ClassLoader createPluginClassLoader(final File[] classPath,
final ClassLoader[] parentLoaders,
IdeaPluginDescriptor pluginDescriptor) {
if (pluginDescriptor.getUseIdeaClassLoader()) {
try {
final ClassLoader loader = PluginManager.class.getClassLoader();
final Method addUrlMethod = getAddUrlMethod(loader);
for (File aClassPath : classPath) {
final File file = aClassPath.getCanonicalFile();
addUrlMethod.invoke(loader, file.toURL());
}
return loader;
}
catch (NoSuchMethodException e) {
e.printStackTrace(); // fall through and build a regular PluginClassLoader below
}
catch (IOException e) {
e.printStackTrace(); // fall through and build a regular PluginClassLoader below
}
catch (IllegalAccessException e) {
e.printStackTrace(); // fall through and build a regular PluginClassLoader below
}
catch (InvocationTargetException e) {
e.printStackTrace(); // fall through and build a regular PluginClassLoader below
}
}
PluginId pluginId = pluginDescriptor.getPluginId();
File pluginRoot = pluginDescriptor.getPath();
if (ApplicationManager.getApplication().isUnitTestMode()) return null;
try {
final List<URL> urls = new ArrayList<URL>(classPath.length);
for (File aClassPath : classPath) {
final File file = aClassPath.getCanonicalFile(); // it is critical not to have "." and ".." in classpath elements
urls.add(file.toURL());
}
return new PluginClassLoader(urls, parentLoaders, pluginId, pluginRoot);
}
catch (MalformedURLException e) {
e.printStackTrace();
}
catch (IOException e) {
e.printStackTrace();
}
return null;
}
@SuppressWarnings({"HardCodedStringLiteral"})
private static Method getAddUrlMethod(final ClassLoader loader) throws NoSuchMethodException {
if (loader instanceof URLClassLoader) {
final Method addUrlMethod = URLClassLoader.class.getDeclaredMethod("addURL", URL.class);
addUrlMethod.setAccessible(true);
return addUrlMethod;
}
return loader.getClass().getDeclaredMethod("addURL", URL.class);
}
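  // Illustrative sketch only (not part of the original class and never called): shows how the
  // reflective handle above could be used to append one jar to an existing loader at runtime.
  // The jar argument is whatever a caller would supply; nothing here is a path this code loads.
  @SuppressWarnings({"unused"})
  private static void appendJarForIllustration(final ClassLoader loader, final File jar) throws Exception {
    final Method addUrl = getAddUrlMethod(loader);
    addUrl.invoke(loader, jar.getCanonicalFile().toURL());
  }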
private void addParentClasspath(List<URL> aClasspathElements) throws MalformedURLException {
final ClassLoader loader = getClass().getClassLoader();
if (loader instanceof URLClassLoader) {
URLClassLoader urlClassLoader = (URLClassLoader)loader;
aClasspathElements.addAll(Arrays.asList(urlClassLoader.getURLs()));
}
else {
try {
Class antClassLoaderClass = Class.forName("org.apache.tools.ant.AntClassLoader");
if (antClassLoaderClass.isInstance(loader) ||
loader.getClass().getName().equals("org.apache.tools.ant.AntClassLoader") ||
loader.getClass().getName().equals("org.apache.tools.ant.loader.AntClassLoader2")) {
//noinspection HardCodedStringLiteral
final String classpath =
(String)antClassLoaderClass.getDeclaredMethod("getClasspath", ArrayUtil.EMPTY_CLASS_ARRAY).invoke(loader, ArrayUtil.EMPTY_OBJECT_ARRAY);
          final StringTokenizer tokenizer = new StringTokenizer(classpath, File.pathSeparator, false); // classpath entries are separated by the path separator (':' or ';'), not the file separator
while (tokenizer.hasMoreTokens()) {
final String token = tokenizer.nextToken();
aClasspathElements.add(new File(token).toURL());
}
}
else {
getLogger().warn("Unknown classloader: " + loader.getClass().getName());
}
}
catch (ClassCastException e) {
getLogger().warn("Unknown classloader [CCE]: " + e.getMessage());
}
catch (ClassNotFoundException e) {
getLogger().warn("Unknown classloader [CNFE]: " + loader.getClass().getName());
}
catch (NoSuchMethodException e) {
getLogger().warn("Unknown classloader [NSME]: " + e.getMessage());
}
catch (IllegalAccessException e) {
getLogger().warn("Unknown classloader [IAE]: " + e.getMessage());
}
catch (InvocationTargetException e) {
getLogger().warn("Unknown classloader [ITE]: " + e.getMessage());
}
}
}
private static void addIDEALibraries(List<URL> classpathElements) {
final String ideaHomePath = PathManager.getHomePath();
addAllFromLibFolder(ideaHomePath, classpathElements);
}
@SuppressWarnings({"HardCodedStringLiteral"})
public static void addAllFromLibFolder(final String aFolderPath, List<URL> classPath) {
try {
final Class<PluginManager> aClass = PluginManager.class;
final String selfRoot = PathManager.getResourceRoot(aClass, "/" + aClass.getName().replace('.', '/') + ".class");
final URL selfRootUrl = new File(selfRoot).getAbsoluteFile().toURL();
classPath.add(selfRootUrl);
final File libFolder = new File(aFolderPath + File.separator + "lib");
addLibraries(classPath, libFolder, selfRootUrl);
final File antLib = new File(new File(libFolder, "ant"), "lib");
addLibraries(classPath, antLib, selfRootUrl);
}
catch (MalformedURLException e) {
getLogger().error(e);
}
}
private static void addLibraries(List<URL> classPath, File fromDir, final URL selfRootUrl) throws MalformedURLException {
final File[] files = fromDir.listFiles();
if (files != null) {
for (final File file : files) {
if (!isJarOrZip(file)) {
continue;
}
final URL url = file.toURL();
if (selfRootUrl.equals(url)) {
continue;
}
classPath.add(url);
}
}
}
@SuppressWarnings({"HardCodedStringLiteral"})
private static boolean isJarOrZip(File file) {
if (file.isDirectory()) {
return false;
}
final String name = file.getName();
return StringUtil.endsWithIgnoreCase(name, ".jar") || StringUtil.endsWithIgnoreCase(name, ".zip");
}
private static void addAdditionalClassPath(List<URL> classPath) {
try {
//noinspection HardCodedStringLiteral
final StringTokenizer tokenizer = new StringTokenizer(System.getProperty("idea.additional.classpath", ""), File.pathSeparator, false);
while (tokenizer.hasMoreTokens()) {
String pathItem = tokenizer.nextToken();
classPath.add(new File(pathItem).toURL());
}
}
catch (MalformedURLException e) {
getLogger().error(e);
}
}
@SuppressWarnings({"HardCodedStringLiteral"})
private static boolean isLoadingOfExternalPluginsDisabled() {
return !"true".equalsIgnoreCase(System.getProperty("idea.plugins.load", "true"));
}
public static boolean isPluginInstalled(PluginId id) {
return (getPlugin(id) != null);
}
@Nullable
public static IdeaPluginDescriptor getPlugin(PluginId id) {
final IdeaPluginDescriptor[] plugins = getPlugins();
for (final IdeaPluginDescriptor plugin : plugins) {
if (Comparing.equal(id, plugin.getPluginId())) {
return plugin;
}
}
return null;
}
public static void addPluginClass(String className, PluginId pluginId) {
synchronized(PLUGIN_CLASSES_LOCK) {
if (ourPluginClasses == null) {
ourPluginClasses = new THashMap<String, PluginId>();
}
ourPluginClasses.put(className, pluginId);
}
}
public static boolean isPluginClass(String className) {
return getPluginByClassName(className) != null;
}
@Nullable
public static PluginId getPluginByClassName(String className) {
synchronized (PLUGIN_CLASSES_LOCK) {
return ourPluginClasses != null ? ourPluginClasses.get(className) : null;
}
}
public static void saveDisabledPlugins(List<String> ids, boolean append) throws IOException {
File plugins = new File(PathManager.getConfigPath(), PluginManager.DISABLED_PLUGINS_FILENAME);
if (!plugins.isFile()) {
plugins.createNewFile();
}
PrintWriter printWriter = null;
try {
printWriter = new PrintWriter(new BufferedWriter(new FileWriter(plugins, append)));
for (String id : ids) {
printWriter.println(id);
}
printWriter.flush();
}
finally {
if (printWriter != null) {
printWriter.close();
}
}
}
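  // Illustrative usage (the plugin id is a placeholder): append one id to the disabled-plugins
  // file without overwriting the entries already recorded there.
  //
  //   PluginManager.saveDisabledPlugins(Collections.singletonList("com.example.some.plugin"), true);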
public static List<String> getDisabledPlugins() {
if (ourDisabledPlugins == null) {
ourDisabledPlugins = new ArrayList<String>();
if (System.getProperty("idea.ignore.disabled.plugins") == null) {
final File file = new File(PathManager.getConfigPath(), DISABLED_PLUGINS_FILENAME);
if (file.isFile()) {
BufferedReader reader = null;
try {
reader = new BufferedReader(new FileReader(file));
String id;
while ((id = reader.readLine()) != null) {
ourDisabledPlugins.add(id.trim());
}
}
catch (IOException e) {
//do nothing
}
finally {
try {
if (reader != null) {
reader.close();
}
}
catch (IOException e) {
//do nothing
}
}
}
}
}
return ourDisabledPlugins;
}
private static class IdeaLogProvider implements LogProvider {
public void error(String message) {
getLogger().error(message);
}
public void error(String message, Throwable t) {
getLogger().error(message, t);
}
public void error(Throwable t) {
getLogger().error(t);
}
public void warn(String message) {
getLogger().info(message);
}
public void warn(String message, Throwable t) {
getLogger().info(message, t);
}
public void warn(Throwable t) {
getLogger().info(t);
}
}
}
|
package me.elrod.pureio;
import java.io.*;
import java.util.ArrayList;
/**
* A compatibility interpreter for our free IO monads to make them do things in
* a way that a typical Java environment might expect.
*
* You could implement your own to do cooler, better, things.
*/
public class UnsafePerformIO {
final static BufferedReader in =
new BufferedReader(new InputStreamReader(System.in));
public static <A> A unsafePerformIO(PureIO<A> t) {
return t.cata(
a -> a,
a -> a.cata(
(s, tt) -> {
System.out.println(s);
return unsafePerformIO(tt);
},
f -> {
try {
String s = in.readLine();
return unsafePerformIO(f.apply(s));
} catch (IOException e) {
throw new RuntimeException(e);
}
},
(ec, tt) -> {
System.exit(ec);
return unsafePerformIO(tt);
}));
}
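    // Illustrative usage (hedged): given some value "program" of type PureIO<Integer> built
    // elsewhere with the library's combinators, running it to completion looks like
    //
    //   Integer result = UnsafePerformIO.unsafePerformIO(program);
    //
    // Console reads/writes and exit codes encoded in the program are performed here,
    // at the edge of the world.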
    /**
     * The same as {@link #unsafePerformIO(PureIO)}, but for {@link PureIOT}-style
     * trampolining.
     */
public static <A> PureIOT<A> unsafePerformIOT(TerminalOperation<PureIOT<A>> t) {
return t.cata(
(s, tt) -> {
System.out.println(s);
return tt;
},
f -> {
try {
String s = in.readLine();
return f.apply(s);
} catch (IOException e) {
throw new RuntimeException(e);
}
},
(ec, tt) -> {
System.exit(ec);
return tt;
});
}
public static <A> A unsafePerformFileIO(PureFileIO<A> t) {
return t.cata(
a -> a,
a -> a.cata(
(filename, f) -> {
try {
ArrayList<String> al = new ArrayList<String>();
BufferedReader in = new BufferedReader(new FileReader(filename));
while (in.ready()) {
al.add(in.readLine());
}
in.close();
return unsafePerformFileIO(f.apply(LinkedList.fromArray(al.toArray(new String[0]))));
} catch (IOException e) {
throw new RuntimeException(e);
}
},
(data, f) -> {
try(PrintWriter out = new PrintWriter(new BufferedWriter(new FileWriter(data.run1(), true)))) {
out.print(data.run2());
return unsafePerformFileIO(f);
} catch (IOException e) {
throw new RuntimeException(e);
}
}));
}
}
|
package org.jboss.test.virtual.test;
import java.io.IOException;
import java.lang.reflect.Field;
import java.net.URI;
import java.net.URL;
import java.util.Collections;
import java.util.Map;
import org.jboss.virtual.VFS;
import org.jboss.virtual.VFSUtils;
import org.jboss.virtual.VirtualFile;
import org.jboss.virtual.plugins.context.jar.JarContextFactory;
import org.jboss.virtual.spi.VFSContext;
import org.jboss.virtual.spi.VFSContextFactory;
import org.jboss.virtual.spi.VFSContextFactoryLocator;
import org.jboss.virtual.spi.cache.CacheStatistics;
import org.jboss.virtual.spi.cache.VFSCache;
import org.jboss.virtual.spi.cache.VFSCacheFactory;
import org.jboss.virtual.spi.cache.helpers.NoopVFSCache;
/**
* VFSCache Test.
*
* @author <a href="ales.justin@jboss.com">Ales Justin</a>
*/
public abstract class VFSCacheTest extends AbstractVFSRegistryTest
{
public VFSCacheTest(String name)
{
super(name);
}
protected abstract VFSCache createCache();
protected void configureCache(VFSCache cache) throws Exception
{
}
protected void stopCache(VFSCache cache)
{
if (cache != null)
cache.stop();
}
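   /**
    * Reflectively exposes the private factoryByProtocol registry of VFSContextFactoryLocator,
    * so that tests such as testJarPath can temporarily swap the factory registered for the
    * "jar" protocol and restore it afterwards.
    */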
private static Map<String, VFSContextFactory> getFactoryByProtocol()
{
try
{
Field field = VFSContextFactoryLocator.class.getDeclaredField("factoryByProtocol");
field.setAccessible(true);
return (Map<String, VFSContextFactory>) field.get(null);
}
catch (SecurityException e)
{
throw new RuntimeException(e);
}
catch (NoSuchFieldException e)
{
throw new RuntimeException(e);
}
catch (IllegalArgumentException e)
{
throw new RuntimeException(e);
}
catch (IllegalAccessException e)
{
throw new RuntimeException(e);
}
}
@SuppressWarnings("deprecation")
public void testCache() throws Exception
{
URL url = getResource("/vfs/test/nested");
VFSCache cache = createCache();
cache.start();
try
{
VFSCacheFactory.setInstance(cache);
try
{
configureCache(cache);
VirtualFile root = VFS.getRoot(url);
VirtualFile file = root.findChild("/nested.jar/META-INF/empty.txt");
URL fileURL = file.toURL();
VirtualFile nested = root.findChild("/nested.jar/complex.jar/subfolder/subsubfolder/subsubchild");
URL nestedURL = nested.toURL();
assertEquals(file, VFS.getRoot(fileURL));
assertEquals(nested, VFS.getRoot(nestedURL));
VFSCacheFactory.setInstance(null);
VFSCache wrapper = new WrapperVFSCache(cache);
VFSCacheFactory.setInstance(wrapper);
assertEquals(file, VFS.getRoot(fileURL));
assertEquals(nested, VFS.getRoot(nestedURL));
}
finally
{
VFSCacheFactory.setInstance(null);
}
}
finally
{
stopCache(cache);
}
}
protected abstract void testCachedContexts(Iterable<VFSContext> iter);
public void testCacheStatistics() throws Exception
{
URL url = getResource("/vfs/test/nested");
VFSCache cache = createCache();
cache.start();
try
{
if (cache instanceof CacheStatistics)
{
CacheStatistics statistics = CacheStatistics.class.cast(cache);
VFSCacheFactory.setInstance(cache);
try
{
configureCache(cache);
VirtualFile root = VFS.getRoot(url);
assertNotNull(root);
Iterable<VFSContext> iter = statistics.getCachedContexts();
testCachedContexts(iter);
assertEquals(1, statistics.size());
assertTrue(statistics.lastInsert() != 0);
}
finally
{
VFSCacheFactory.setInstance(null);
}
}
}
finally
{
stopCache(cache);
}
}
protected Class<? extends VFSCache> getCacheClass()
{
VFSCache cache = createCache();
return cache.getClass();
}
protected Iterable<String> populateRequiredSystemProperties()
{
return Collections.emptySet();
}
protected abstract Map<Object, Object> getMap();
public void testCacheFactory() throws Exception
{
VFSCache cache = null;
String cacheClassName = getCacheClass().getName();
VFSCacheFactory.setInstance(null);
try
{
Iterable<String> keys = populateRequiredSystemProperties();
try
{
cache = VFSCacheFactory.getInstance(cacheClassName);
assertNotNull(cache);
assertTrue(cache instanceof NoopVFSCache == false);
cache.flush();
}
finally
{
for (String key : keys)
System.clearProperty(key);
}
VFSCacheFactory.setInstance(null);
VFSCache mapParamCache = VFSCacheFactory.getInstance(cacheClassName, getMap());
// need new instance, so we know we're really testing map parameter
assertNotSame(cache, mapParamCache);
cache = mapParamCache;
assertNotNull(cache);
assertTrue(cache instanceof NoopVFSCache == false);
cache.flush();
}
finally
{
stopCache(cache);
VFSCacheFactory.setInstance(null);
}
}
public void testJarPath() throws Exception
{
// to circumvent another bug in VFS
Map<String, VFSContextFactory> factoryByProtocol = getFactoryByProtocol();
VFSContextFactory oldFactory = factoryByProtocol.put("jar", new JarContextFactory());
VFSCache cache = createCache();
cache.start();
try
{
VFSCacheFactory.setInstance(cache);
try
{
configureCache(cache);
URL url = getResource("/vfs/test/jar1.jar");
URL manifestURL = new URL("jar:" + url.toExternalForm() + "!/META-INF/MANIFEST.MF");
// first we ask for a jar:file: resource
VirtualFile manifest = VFS.getRoot(manifestURL);
assertNotNull(manifest);
// then we ask for a file: resource
VirtualFile jar = VFS.getRoot(url);
assertNotNull(jar);
}
catch(IOException e)
{
fail("failed to get the proper files: " + e.getMessage());
}
finally
{
VFSCacheFactory.setInstance(null);
}
}
finally
{
factoryByProtocol.put("jar", oldFactory);
stopCache(cache);
}
}
/**
* VFSCache assumes that VFSUtils.stripProtocol gives out usable paths
* to key upon.
*/
public void testVFSUtilsStripProtocol() throws Exception
{
URL url = getResource("/vfs/test/jar1.jar");
VirtualFile manifest = VFS.getRoot(url).getChild("META-INF/MANIFEST.MF");
String expected = VFSUtils.stripProtocol(manifest.toURI());
URL manifestURL = new URL("jar:" + url.toExternalForm() + "!/META-INF/MANIFEST.MF");
String actual = VFSUtils.stripProtocol(manifestURL.toURI());
assertEquals("path from jar:file: url is not usable", expected, actual);
}
private class WrapperVFSCache implements VFSCache
{
private VFSCache delegate;
private WrapperVFSCache(VFSCache delegate)
{
this.delegate = delegate;
}
public VFSContext findContext(URI uri)
{
return delegate.findContext(uri);
}
public VFSContext findContext(URL url)
{
return delegate.findContext(url);
}
public void putContext(VFSContext context)
{
throw new IllegalArgumentException("Context should already be there: " + context);
}
public void removeContext(VFSContext context)
{
}
public void start() throws Exception
{
}
public void stop()
{
}
public void flush()
{
}
}
}
|
package uk.gov.dfid.common.search;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.node.Node;
import org.elasticsearch.node.NodeBuilder;
import org.elasticsearch.search.SearchHit;
public class ElasticSearch {
private static Client client;
private static Node node;
public static void connectToESNode(String dataLocation) {
node = NodeBuilder
.nodeBuilder()
.local(true)
.data(true)
.settings(ImmutableSettings.settingsBuilder().put("path.data", dataLocation)).node();
node.client().admin().cluster().prepareHealth().setWaitForYellowStatus().execute().actionGet();
client = node.client();
}
/**
* Shut down all connections
*/
public void shutdown() {
if (node != null)
node.close();
if (client != null)
client.close();
}
public void deleteAll(){
client.admin().indices().prepareDelete().execute().actionGet();
}
public IndexResponse putIndex(Map<String, Object> indexMap, String indexName, String dataLocation) {
if(client == null){
connectToESNode(dataLocation);
}
IndexRequest indexRequest = new IndexRequest(indexName);
indexRequest.type("index");
indexRequest.source(indexMap);
return client.index(indexRequest).actionGet();
}
public static List<Map<String, String>> search(String search, String dataLocation) {
if(client == null){
System.out.println("Launching ES client");
connectToESNode(dataLocation);
System.out.println("ES client launched");
}
Long counter = System.currentTimeMillis();
List<Map<String, String>> results = new ArrayList<Map<String, String>>();
SearchResponse response = client.prepareSearch()
.setQuery(QueryBuilders.queryString(search).defaultOperator(org.elasticsearch.index.query.QueryStringQueryBuilder.Operator.AND)).setSize(300).execute()
.actionGet();
SearchHit[] hits = response.getHits().getHits();
for (SearchHit hit : hits) {
Map<String, String> hitMap = (HashMap<String, String>)(Object)hit.getSource();
results.add(hitMap);
}
System.out.println(new StringBuilder().append("Keyword '").append(search).append("'")
.append(" numer of results: ").append(results.size())
.append(", took (s): ").append( (System.currentTimeMillis() - counter)/(float)1000) );
return results;
}
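    // Illustrative usage (the index name and data path are placeholders, not values used elsewhere):
    //
    //   ElasticSearch es = new ElasticSearch();
    //   Map<String, Object> doc = new HashMap<String, Object>();
    //   doc.put("title", "Water and sanitation project");
    //   es.putIndex(doc, "projects", "/tmp/es-data");
    //   List<Map<String, String>> hits = ElasticSearch.search("water", "/tmp/es-data");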
}
|
package logic_subsystem;
import com.badlogic.gdx.math.Vector2;
import player.HumanPlayer;
import player.StatsClass;
import xidecsc.ExistenceCreator;
import xidecsc.StateContainer;
import xidecsc.XiabweenInternalDatabase;
public class GameMaster {
protected XiabweenInternalDatabase database;
protected ExistenceCreator creator;
protected StateContainer container;
public GameMaster() {
this.database = new XiabweenInternalDatabase();
this.creator = new ExistenceCreator();
this.container = new StateContainer();
}
// Finally, some very high level abstraction.
	/**
	 * Adds a new player to the game and registers it in the internal database.
	 *
	 * @param spawnLocation
	 *            the location at which the player spawns.
	 * @param initialDirection
	 *            the direction the player initially faces.
	 * @param name
	 *            the character's name as it appears in game.
	 * @param texture
	 *            the sprite sheet file used by the player.
	 * @param stats
	 *            a structure holding the player's stats.
	 */
public void addPlayerToGame(Vector2 spawnLocation, float initialDirection, String name, String texture, StatsClass stats) {
HumanPlayer newPlayer = creator.createNewHumanPlayer(spawnLocation.x, spawnLocation.y, initialDirection, name, texture, stats);
database.addPlayerToDatabase(newPlayer);
}
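	// Illustrative call (all argument values are placeholders; stats would be a StatsClass
	// instance built elsewhere):
	//
	//   gameMaster.addPlayerToGame(new Vector2(100f, 100f), 0f, "Hero",
	//           "sprites/hero_sheet.png", stats);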
public void removePlayerFromGame(HumanPlayer player) throws IllegalAccessException {
boolean result = database.removePlayerFromDatabase(player);
if (!result) {
throw new IllegalAccessException();
}
}
}
|
package mainpackage.Screens;
import mainpackage.Game;
import mainpackage.PlayerInput;
import mainpackage.SpriteClass;
import mainpackage.TextureFiles;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.Input.Keys;
import com.badlogic.gdx.Screen;
import com.badlogic.gdx.audio.Music;
import com.badlogic.gdx.audio.Sound;
import com.badlogic.gdx.graphics.GL10;
import com.badlogic.gdx.graphics.Texture;
import com.badlogic.gdx.graphics.g2d.SpriteBatch;
import com.badlogic.gdx.graphics.g2d.TextureAtlas;
import com.badlogic.gdx.graphics.g2d.TextureRegion;
import com.badlogic.gdx.scenes.scene2d.ui.Skin;
public class GameScreen implements Screen {
// variables
private Game game;
private SpriteClass spriteClass = new SpriteClass();
private SpriteBatch batch;
private TextureAtlas atlas;
private Skin skin;
private Music battleMusic;
private Sound attack01;
private Texture backgroundTex, hpBarLeftTex, hpBarRightTex, roundsTex, player01SmallTex;
private int playerXPos = 50, playerYPos = 15, moveSpeed = 4, player01State = 0, curAction = 0, optionIndex =0;
private boolean isKeyPressed, isFacingRight = true, isPaused = false, grounded = true, timeUp;
private float velocityX, velocityY, gravity = 5, elapsedTime, jumpStrength = 100;
private Texture curAnimation, selectorTex, pauseFilterTex, pauseMenuTex;
private int[] pauseOptions = new int[3];
private Texture[] pauseHelpTxts = new Texture[3];
private int[] optionPositions = new int[4];
// constructor to keep a reference to the main Game class
public GameScreen(Game game) {
super();
this.game = game;
battleMusic = Gdx.audio.newMusic(Gdx.files.internal("assets/audioFiles/battleMusic/battleMusic01.mp3"));
attack01 = Gdx.audio.newSound(Gdx.files.internal("assets/audioFiles/ichigoCombat/ichigoAttack01.wav"));
backgroundTex = new Texture(Gdx.files.internal("assets/sprites/backgrounds/battle_BG_01.png"));
selectorTex = new Texture(Gdx.files.internal("assets/gui/selectorTex.png"));
pauseFilterTex = new Texture(Gdx.files.internal("assets/gui/pauseFilter.png"));
pauseMenuTex = new Texture(Gdx.files.internal("assets/gui/pauseMenuTex.png"));
pauseHelpTxts[0] = new Texture(Gdx.files.internal("assets/gui/pauseHelpTxt01.png"));
pauseHelpTxts[1] = new Texture(Gdx.files.internal("assets/gui/pauseHelpTxt02.png"));
pauseHelpTxts[2] = new Texture(Gdx.files.internal("assets/gui/pauseHelpTxt03.png"));
//stores the Y coordinates of the pause menu options in pixels
pauseOptions[0] = 350;
pauseOptions[1] = 300;
pauseOptions[2] = 250;
}
// called when the screen should render itself
public void render(float delta) {
Gdx.gl.glClearColor(0, 0, 0, 1);
Gdx.gl.glClear(GL10.GL_COLOR_BUFFER_BIT);
//enabling keyboard events
PlayerInput playerInput = new PlayerInput(game);
// set the input processor
Gdx.input.setInputProcessor(playerInput);
//checks for when the player is touching the ground
if(playerYPos <= 15)
{
grounded = true;
}
//applies gravity
if(grounded == false)
{
playerYPos -= gravity;
}
//sent to SpriteClass to tell it what the current character and animation are
spriteClass.setSheetVals(1, curAction);
//calls SpriteClass to get the current animation and stores it
curAnimation = spriteClass.setAnimation();
batch.begin();
//draws the background texture (texture, x-coordinate, y-coordinate, width, height, source width, source height, horizontal flip, vertical flip)
batch.draw(backgroundTex, 0, 0, 800, 600, 0, 0, backgroundTex.getWidth(), backgroundTex.getHeight(), false, false);
//draws player 1 (texture, source x-coordinate, source y-coordinate, source width, source height, x-coordinate, y-coordinate)
batch.draw(new TextureRegion(curAnimation, spriteClass.getFrameIndex() * (curAnimation.getWidth() / 6), 0, curAnimation.getWidth() / 6, curAnimation.getHeight()) , playerXPos, playerYPos);
//draws the pause menu
if(isPaused == true)
{
//draws the black filter to create dimming effect
batch.draw(pauseFilterTex, 0, 0);
//draws the pause menu
batch.draw(pauseMenuTex, 250, 200);
//draws the help text-boxes
batch.draw(pauseHelpTxts[optionIndex], 360, 450);
//draws the selector (texture, source x-coordinate, source y-coordinate, source width, source height, x-coordinate, y-coordinate)
batch.draw(new TextureRegion(selectorTex, spriteClass.getFrameIndex() * (selectorTex.getWidth() / 6), 0, selectorTex.getWidth() / 6, selectorTex.getHeight()) , 300, pauseOptions[optionIndex]);
}
batch.end();
//checks if the game is not paused
if(isPaused == false)
{
//checks if player 1 is grounded
if(grounded == true)
{
				// checks if no key is pressed down; Keys.ANY_KEY is the libGDX way to ask whether
				// anything is held (the original LWJGL Keyboard.KEY_NONE constant does not map onto libGDX key codes)
				isKeyPressed = Gdx.input.isKeyPressed(Keys.ANY_KEY);
				if (!isKeyPressed && player01State == 0)
{
if (isFacingRight == true)
{
curAction = 0;
}
else
{
curAction = 1;
}
}
// checks if an attack has finished
if (player01State == 1 || player01State == 3)
{
if (spriteClass.getFrameIndex() >= 5)
{
player01State = 0;
}
}
// handles keyboard input involving held down key actions
//checks if player 1 is at the left edge of the screen
if (playerXPos > 0)
{
//checks if the left arrow key has been held down
if (isKeyPressed = Gdx.input.isKeyPressed(Keys.LEFT) && player01State == 0)
{
curAction = 3;
isFacingRight = false;
playerXPos -= moveSpeed;
}
}
//checks if the player is at the right edge of the screen
if (playerXPos < 800 - (curAnimation.getWidth() / 6))
{
//checks if the right arrow key has been held down
if (isKeyPressed = Gdx.input.isKeyPressed(Keys.RIGHT) && player01State == 0) {
curAction = 2;
isFacingRight = true;
playerXPos += moveSpeed;
}
}
//checks if the down arrow key has been held down
if (isKeyPressed = Gdx.input.isKeyPressed(Keys.DOWN) && (player01State == 0 || player01State == 2))
{
player01State = 3;
if (isFacingRight == true)
{
curAction = 6;
}
else
{
curAction = 7;
}
//checks if the block key is also held down while the down arrow key is held down
if (isKeyPressed = Gdx.input.isKeyPressed(Keys.N))
{
player01State = 2;
if (isFacingRight == true)
{
curAction = 10;
}
else
{
curAction = 11;
}
}
}
//checks if the block key is held down
if (isKeyPressed = Gdx.input.isKeyPressed(Keys.N) && player01State == 0) {
player01State = 2;
if (isFacingRight == true) {
curAction = 8;
}
else
{
curAction = 9;
}
}
}
}
else
{
//pauses the music whenever the game is paused
battleMusic.pause();
}
}
	// called when the screen is resized
public void resize(int width, int height) {
}
// called when this screen becomes the current screen
public void show() {
batch = new SpriteBatch();
atlas = new TextureAtlas("assets/gui/button.pack");
skin = new Skin();
skin.addRegions(atlas);
battleMusic.play();
battleMusic.setLooping(true);
battleMusic.setVolume(this.game.masterVolume);
}
// called when current screen changes to a different screen
public void hide() {
}
// called when the game is paused
public void pause() {
isPaused = true;
}
// called when the game resumes
public void resume() {
battleMusic.play();
isPaused = false;
}
// called when this screen should release all resources
public void dispose() {
batch.dispose();
skin.dispose();
atlas.dispose();
battleMusic.dispose();
}
//returns whether the game is paused or not (called in SpriteClass to pause frame Index updating)
public boolean pausedState()
{
return isPaused;
}
//handles keyboard input involving single press key actions
public void keyDown(int keycode)
{
switch(keycode)
{
//checks whether the attack key was pressed
case Keys.M:
if(isPaused == false)
{
//checks whether the player is standing up or ducking and plays the coinciding attack animation
switch(player01State)
{
case 0:
spriteClass.resetFrameIndex();
attack01.play();
player01State = 1;
if(isFacingRight == true)
{
curAction = 4;
}
else
{
curAction = 5;
}
break;
case 3:
spriteClass.resetFrameIndex();
attack01.play();
player01State = 1;
if(isFacingRight == true)
{
curAction = 12;
}
else
{
curAction = 13;
}
break;
}
}
//changes the functionality of the attack key to select when the game is paused
else
{
switch(optionIndex)
{
case 0:
resume();
break;
case 1:
break;
case 2:
game.setScreen(new MenuScreen(game));
break;
}
}
return;
//checks whether the P key was pressed
case Keys.P:
if(isPaused == false)
{
pause();
}
else
{
resume();
}
return;
//checks whether the up arrow key was pressed
case Keys.UP:
if(isPaused == false)
{
//makes player 1 jump
grounded = false;
if(grounded == false)
{
playerYPos += jumpStrength;
}
}
//changes the functionality of the up arrow key when the game is paused
else
{
					optionIndex--;
if(optionIndex < 0)
{
optionIndex = 2;
}
}
return;
//checks whether the down arrow key was pressed
case Keys.DOWN:
//changes the functionality of the down arrow key when the game is paused
if(isPaused == true)
{
optionIndex ++;
if(optionIndex > 2)
{
optionIndex = 0;
}
}
return;
}
}
//checks when keys are released
public void keyUp(int keycode)
{
switch(keycode)
{
//checks whether the block key was released
case Keys.N:
player01State = 0;
return;
//checks whether the down arrow key was released
case Keys.DOWN:
player01State = 0;
return;
}
}
}
|
package analysis.dynamicsim;
import java.io.IOException;
import java.util.HashSet;
import javax.swing.JFrame;
import javax.swing.JProgressBar;
import javax.xml.stream.XMLStreamException;
import org.sbml.jsbml.RateRule;
import org.sbml.jsbml.Rule;
import odk.lang.FastMath;
public class SimulatorSSADirectHierarchical extends HierarchicalSimulator{
private static Long initializationTime = new Long(0);
private String modelstateID;
private boolean updateRateRule;
public SimulatorSSADirectHierarchical(String SBMLFileName, String outputDirectory, double timeLimit,
double maxTimeStep, double minTimeStep, long randomSeed, JProgressBar progress, double printInterval,
double stoichAmpValue, JFrame running, String[] interestingSpecies, String quantityType)
throws IOException, XMLStreamException {
super(SBMLFileName, outputDirectory, timeLimit, maxTimeStep, minTimeStep, progress,
printInterval, stoichAmpValue, running, interestingSpecies, quantityType);
try {
initialize(randomSeed, 1);
modelstateID = "topmodel";
} catch (IOException e2) {
e2.printStackTrace();
} catch (XMLStreamException e2) {
e2.printStackTrace();
}
}
@Override
public void simulate() {
if (sbmlHasErrorsFlag)
return;
long initTime2 = System.nanoTime();
initializationTime += System.nanoTime() - initTime2;
//SIMULATION LOOP
currentTime = 0.0;
double printTime = printInterval;
double nextEventTime = handleEvents();
while (currentTime < timeLimit && !cancelFlag && constraintFlag)
{
//EVENT HANDLING
//trigger and/or fire events, etc.
if (topmodel.noEventsFlag == false)
{
HashSet<String> affectedReactionSet = fireEvents(topmodel, "reaction", topmodel.noRuleFlag, topmodel.noConstraintsFlag);
				//recalculate propensities/groups for affected reactions
if (affectedReactionSet.size() > 0)
updatePropensities(affectedReactionSet, "topmodel");
}
for(ModelState models : submodels.values())
{
if (models.noEventsFlag == false) {
HashSet<String> affectedReactionSet = fireEvents(models, "reaction", models.noRuleFlag, models.noConstraintsFlag);
//recalculate propensties/groups for affected reactions
if (affectedReactionSet.size() > 0)
updatePropensities(affectedReactionSet, models.ID);
}
}
			//TSD PRINTING
			//print to TSD if the next print interval arrives
			//this obviously prints the previous timestep's data
			//(the inline printing below was superseded by the print loop after the
			//time-step selection further down; kept commented out for reference)
//			if (currentTime >= printTime) {
//				if (printTime < 0)
//					printTime = 0.0;
//				try {
//					printToTSD(printTime);
//					bufferedTSDWriter.write(",\n");
//				} catch (IOException e) {
//					e.printStackTrace();
//				}
//				printTime += printInterval;
//			}
//STEP 1: generate random numbers
double r1 = randomNumberGenerator.nextDouble();
double r2 = randomNumberGenerator.nextDouble();
//STEP 2: calculate delta_t, the time till the next reaction execution
double totalPropensity = getTotalPropensity();
double delta_t = FastMath.log(1 / r1) / totalPropensity;
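			// delta_t is an exponentially distributed waiting time with rate totalPropensity,
			// obtained by inverse-transform sampling (ln(1/r1) / a0) as in Gillespie's direct method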
double nextReactionTime = currentTime + delta_t;
nextEventTime = handleEvents();
updateRateRule = false;
if (nextReactionTime < nextEventTime && nextReactionTime < currentTime + maxTimeStep)
{
currentTime = nextReactionTime;
// perform reaction
}
else if (nextEventTime < currentTime + maxTimeStep)
{
currentTime = nextEventTime;
// print
}
else
{
currentTime += maxTimeStep;
updateRateRule = true;
// print
}
if (currentTime > timeLimit)
{
currentTime = timeLimit;
}
while (currentTime > printTime && printTime < timeLimit)
{
try {
printToTSD(printTime);
bufferedTSDWriter.write(",\n");
} catch (IOException e) {
e.printStackTrace();
}
printTime += printInterval;
running.setTitle("Progress (" + (int)((currentTime / timeLimit) * 100.0) + "%)");
//update progress bar
progress.setValue((int)((currentTime / timeLimit) * 100.0));
}
if (currentTime == nextReactionTime)
{
//STEP 3: select a reaction
String selectedReactionID = selectReaction(r2);
//if its length isn't positive then there aren't any reactions
if (!selectedReactionID.isEmpty()) {
//STEP 4: perform selected reaction and update species counts
if(modelstateID.equals("topmodel"))
{
//if its length isn't positive then there aren't any reactions
if (!selectedReactionID.isEmpty()) {
performReaction(topmodel, selectedReactionID, topmodel.noRuleFlag, topmodel.noConstraintsFlag);
HashSet<String> affectedReactionSet = getAffectedReactionSet(topmodel, selectedReactionID, true);
//STEP 5: compute affected reactions' new propensities and update total propensity
updatePropensities(affectedReactionSet, modelstateID);
}
//if (topmodel.variableToIsInAssignmentRuleMap != null &&
//topmodel.variableToIsInAssignmentRuleMap.containsKey("time"))
// performAssignmentRules(topmodel, topmodel.variableToAffectedAssignmentRuleSetMap.get("time"));
/*
if (topmodel.noEventsFlag == false) {
handleEvents(topmodel, topmodel.noRuleFlag, topmodel.noConstraintsFlag);
//step to the next event fire time if it comes before the next time step
if (!topmodel.triggeredEventQueue.isEmpty() && topmodel.triggeredEventQueue.peek().fireTime <= currentTime)
currentTime = topmodel.triggeredEventQueue.peek().fireTime;
}
*/
}
else
{
//if its length isn't positive then there aren't any reactions
if (!selectedReactionID.isEmpty()) {
performReaction(submodels.get(modelstateID), selectedReactionID, submodels.get(modelstateID).noRuleFlag, submodels.get(modelstateID).noConstraintsFlag);
HashSet<String> affectedReactionSet = getAffectedReactionSet(submodels.get(modelstateID), selectedReactionID, true);
//STEP 5: compute affected reactions' new propensities and update total propensity
updatePropensities(affectedReactionSet, modelstateID);
}
// if (topmodel.variableToIsInAssignmentRuleMap != null &&
// submodels[submodelIndex].variableToIsInAssignmentRuleMap.containsKey("time"))
//performAssignmentRules(submodels[submodelIndex], submodels[submodelIndex].variableToAffectedAssignmentRuleSetMap.get("time"));
/*
if (submodels[submodelIndex].noEventsFlag == false) {
handleEvents(submodels[submodelIndex], submodels[submodelIndex].noRuleFlag, submodels[submodelIndex].noConstraintsFlag);
//step to the next event fire time if it comes before the next time step
if (!submodels[submodelIndex].triggeredEventQueue.isEmpty() && submodels[submodelIndex].triggeredEventQueue.peek().fireTime <= currentTime)
currentTime = submodels[submodelIndex].triggeredEventQueue.peek().fireTime;
}*/
}
}
}
if(updateRateRule)
{
//updatePropensities(performRateRules(topmodel, currentTime), "topmodel");
performRateRules(topmodel, currentTime);
for(ModelState modelstate : submodels.values())
{
//updatePropensities(performRateRules(modelstate, currentTime), modelstate.ID);
performRateRules(modelstate, currentTime);
}
}
updateRules();
//update time for next iteration
//currentTime += delta_t;
} //end simulation loop
if (cancelFlag == false) {
//print the final species counts
try {
printToTSD(printTime);
} catch (IOException e) {
e.printStackTrace();
}
try {
bufferedTSDWriter.write(')');
bufferedTSDWriter.flush();
}
catch (IOException e1) {
e1.printStackTrace();
}
}
}
	/**
	 * performs every rate rule over the current time step, applying a forward-Euler update
	 * to each non-constant target variable
	 *
	 * @param modelstate the model state whose rate rules are evaluated
	 * @param delta_t the size of the time step just taken
	 * @return the set of variables changed by the rate rules
	 */
private HashSet<String> performRateRules(ModelState modelstate, double delta_t) {
HashSet<String> affectedVariables = new HashSet<String>();
for (Rule rule : modelstate.rateRulesList) {
RateRule rateRule = (RateRule) rule;
String variable = rateRule.getVariable();
//update the species count (but only if the species isn't constant) (bound cond is fine)
if (modelstate.variableToIsConstantMap.containsKey(variable) && modelstate.variableToIsConstantMap.get(variable) == false) {
if (modelstate.speciesToHasOnlySubstanceUnitsMap.containsKey(variable) &&
modelstate.speciesToHasOnlySubstanceUnitsMap.get(variable) == false) {
double currVal = modelstate.getVariableToValue(variable);
double incr = delta_t * (
evaluateExpressionRecursive(modelstate, rateRule.getMath()) *
modelstate.getVariableToValue(modelstate.speciesToCompartmentNameMap.get(variable)));
modelstate.setvariableToValueMap(variable, currVal + incr);
}
else {
double currVal = modelstate.getVariableToValue(variable);
double incr = delta_t * evaluateExpressionRecursive(modelstate, rateRule.getMath());
modelstate.setvariableToValueMap(variable, currVal + incr);
}
affectedVariables.add(variable);
}
}
return affectedVariables;
}
	/**
	 * sets up data structures local to the SSA-Direct method
	 *
	 * @param randomSeed seed for the random number generator
	 * @param runNumber index of the current run, used when setting up output
	 * @throws IOException
	 * @throws XMLStreamException
	 */
private void initialize(long randomSeed, int runNumber)
throws IOException, XMLStreamException {
setupNonConstantSpeciesReferences(topmodel);
setupSpecies(topmodel);
setupParameters(topmodel);
setupReactions(topmodel);
setupConstraints(topmodel);
setupEvents(topmodel);
setupInitialAssignments(topmodel);
setupRules(topmodel);
setupForOutput(randomSeed, runNumber);
for(ModelState model : submodels.values())
{
setupNonConstantSpeciesReferences(model);
setupSpecies(model);
setupParameters(model);
setupReactions(model);
setupConstraints(model);
setupEvents(model);
setupInitialAssignments(model);
setupRules(model);
setupForOutput(randomSeed, runNumber);
}
setupReplacingSpecies();
bufferedTSDWriter.write("(" + "\"" + "time" + "\"");
if(interestingSpecies.length > 0)
{
for(String s : interestingSpecies)
{
bufferedTSDWriter.write(",\"" + s + "\"");
}
bufferedTSDWriter.write("),\n");
return;
}
for (String speciesID : topmodel.speciesIDSet)
{
bufferedTSDWriter.write(",\"" + speciesID + "\"");
}
for (String noConstantParam : topmodel.variablesToPrint)
{
bufferedTSDWriter.write(",\"" + noConstantParam + "\"");
}
/*
for (String compartment : topmodel.compartmentIDSet)
{
bufferedTSDWriter.write(", \"" + compartment + "\"");
}
*/
for(ModelState model : submodels.values())
{
for (String speciesID : model.speciesIDSet)
if(!model.isHierarchical.contains(speciesID))
{
bufferedTSDWriter.write(",\"" + model.ID + "__" + speciesID + "\"");
}
for (String noConstantParam : model.variablesToPrint)
if(!model.isHierarchical.contains(noConstantParam))
{
bufferedTSDWriter.write(",\"" + model.ID + "__" + noConstantParam + "\"");
}
}
bufferedTSDWriter.write("),\n");
}
/**
* updates the propensities of the reactions affected by the recently performed reaction
* @param affectedReactionSet the set of reactions affected by the recently performed reaction
*/
private void updatePropensities(HashSet<String> affectedReactionSet, String id) {
//loop through the affected reactions and update the propensities
for (String affectedReactionID : affectedReactionSet) {
if(id.equals("topmodel"))
{
if(topmodel.isDeletedByMetaID(affectedReactionID))
continue;
HashSet<StringDoublePair> reactantStoichiometrySet =
topmodel.reactionToReactantStoichiometrySetMap.get(affectedReactionID);
updatePropensities(topmodel, affectedReactionID,reactantStoichiometrySet);
}
else
{
if(submodels.get(id).isDeletedByMetaID(affectedReactionID))
continue;
HashSet<StringDoublePair> reactantStoichiometrySet =
submodels.get(id).reactionToReactantStoichiometrySetMap.get(affectedReactionID);
updatePropensities(submodels.get(id), affectedReactionID,reactantStoichiometrySet);
}
}
}
/**
* Helper method
*/
private void updatePropensities(ModelState model, String affectedReactionID, HashSet<StringDoublePair> reactantStoichiometrySet)
{
if(model.reactionToFormulaMap.get(affectedReactionID) == null)
{
model.reactionToPropensityMap.put(affectedReactionID, 0.0);
return;
}
boolean notEnoughMoleculesFlag = false;
//check for enough molecules for the reaction to occur
for (StringDoublePair speciesAndStoichiometry : reactantStoichiometrySet) {
String speciesID = speciesAndStoichiometry.string;
double stoichiometry = speciesAndStoichiometry.doub;
//if there aren't enough molecules to satisfy the stoichiometry
if (model.variableToValueMap.get(speciesID) < stoichiometry) {
notEnoughMoleculesFlag = true;
break;
}
}
double newPropensity = 0.0;
if (notEnoughMoleculesFlag == false) {
newPropensity = evaluateExpressionRecursive(model, model.reactionToFormulaMap.get(affectedReactionID));
//newPropensity = CalculatePropensityIterative(affectedReactionID);
}
double oldPropensity = model.reactionToPropensityMap.get(affectedReactionID);
//add the difference of new v. old propensity to the total propensity
model.propensity += newPropensity - oldPropensity;
//model.propensity = newPropensity - oldPropensity;
//totalPropensity += newPropensity - oldPropensity;
model.reactionToPropensityMap.put(affectedReactionID, newPropensity);
}
/**
* randomly selects a reaction to perform
*
* @param r2 random number
* @return the ID of the selected reaction
*/
private String selectReaction(double r2) {
double randomPropensity = r2 * (getTotalPropensity());
double runningTotalReactionsPropensity = 0.0;
String selectedReaction = "";
//finds the reaction that the random propensity lies in
//it keeps adding the next reaction's propensity to a running total
//until the running total is greater than the random propensity
for (String currentReaction : topmodel.reactionToPropensityMap.keySet()) {
runningTotalReactionsPropensity += topmodel.reactionToPropensityMap.get(currentReaction);
if (randomPropensity < runningTotalReactionsPropensity)
{
selectedReaction = currentReaction;
// keep track of submodel index
modelstateID = "topmodel";
return selectedReaction;
}
}
for(ModelState models : submodels.values())
{
for (String currentReaction : models.reactionToPropensityMap.keySet())
{
runningTotalReactionsPropensity += models.reactionToPropensityMap.get(currentReaction);
if (randomPropensity < runningTotalReactionsPropensity)
{
selectedReaction = currentReaction;
// keep track of submodel index
modelstateID = models.ID;
return selectedReaction;
}
}
}
return selectedReaction;
}
/**
* cancels the current run
*/
@Override
protected void cancel() {
cancelFlag = true;
}
/**
* clears data structures for new run
*/
@Override
protected void clear() {
topmodel.clear();
for(ModelState modelstate : submodels.values())
modelstate.clear();
for(String key : replacements.keySet())
replacements.put(key, initReplacementState.get(key));
/*
SBMLReader reader = new SBMLReader();
SBMLDocument document = reader.readSBML(SBMLFileName);
topmodel = new ModelState(document.getModel(), true, "");
setupSubmodels(document);
getComponentPortMap(document);
*/
}
private double handleEvents()
{
double nextEventTime = Double.POSITIVE_INFINITY;
if (topmodel.noEventsFlag == false)
{
handleEvents(topmodel);
//step to the next event fire time if it comes before the next time step
if (!topmodel.triggeredEventQueue.isEmpty() && topmodel.triggeredEventQueue.peek().fireTime <= nextEventTime)
if(topmodel.triggeredEventQueue.peek().fireTime < nextEventTime)
nextEventTime = topmodel.triggeredEventQueue.peek().fireTime;
}
for(ModelState models : submodels.values())
if (models.noEventsFlag == false){
handleEvents(models);
//step to the next event fire time if it comes before the next time step
if (!models.triggeredEventQueue.isEmpty() && models.triggeredEventQueue.peek().fireTime <= nextEventTime)
if(models.triggeredEventQueue.peek().fireTime < nextEventTime)
nextEventTime = models.triggeredEventQueue.peek().fireTime;
}
return nextEventTime;
}
/**
	 * performs a minimized initialization process to prepare for a new run
*/
@Override
protected void setupForNewRun(int newRun) {
try {
setupNonConstantSpeciesReferences(topmodel);
setupSpecies(topmodel);
setupParameters(topmodel);
setupReactions(topmodel);
setupConstraints(topmodel);
setupEvents(topmodel);
setupInitialAssignments(topmodel);
setupRules(topmodel);
setupForOutput(0, newRun);
}
catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
for(ModelState model : submodels.values())
{
try {
setupNonConstantSpeciesReferences(model);
setupSpecies(model);
setupParameters(model);
setupReactions(model);
setupConstraints(model);
setupEvents(model);
setupInitialAssignments(model);
setupRules(model);
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
setupReplacingSpecies();
constraintFlag = true;
try {
for (String speciesID : topmodel.speciesIDSet) {
bufferedTSDWriter.write(",\"" + speciesID + "\"");
}
for (String noConstantParam : topmodel.variablesToPrint)
bufferedTSDWriter.write(",\"" + noConstantParam + "\"");
/*
for (String compartment : topmodel.compartmentIDSet)
{
bufferedTSDWriter.write(", \"" + compartment + "\"");
}
*/
for(ModelState model : submodels.values())
{
for (String speciesID : model.speciesIDSet)
bufferedTSDWriter.write(",\"" + model.ID + "__" + speciesID + "\"");
for (String noConstantParam : model.variablesToPrint)
bufferedTSDWriter.write(",\"" + model.ID + "__" + noConstantParam + "\"");
/*
for (String compartment : model.compartmentIDSet)
bufferedTSDWriter.write(", \"" + model.ID + "__" + compartment + "\"");
*/
}
bufferedTSDWriter.write("),\n");
}
catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
}
|
package org.jpacman.test.framework.ui;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNotSame;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;
import static org.junit.Assume.assumeTrue;
import org.jpacman.framework.controller.IController;
import org.jpacman.framework.controller.RandomGhostMover;
import org.jpacman.framework.factory.FactoryException;
import org.jpacman.framework.ui.MainUI;
import org.junit.Before;
import org.junit.Test;
/**
* @author Stefan Hugtenburg - Jesse Donkervliet
*
*/
public class MainUITest {
private MainUI mainUI;
/**
* Create a MainUI to test with.
*/
@Before
public void setUp() {
mainUI = new MainUI();
}
/**
* Test to check the initialize function.
* This function should create a game, while not changing the GhostController.
* @throws FactoryException when calling MainUI.initialize() fails.
*/
@Test
public void testInitialize() throws FactoryException {
assertNull(mainUI.getGame());
IController ghostMover = mainUI.getGhostController();
mainUI.initialize();
assertNotNull(mainUI.getGame());
assertEquals(ghostMover, mainUI.getGhostController());
}
/**
* Test for the withGhostController function.
* This setter should always work if createUI has not yet been called.
* @throws FactoryException if the initialize or createUI functions fail.
*/
@Test
public void testWithGhostController() throws FactoryException {
mainUI.initialize();
assertNull(mainUI.getGhostController());
IController ghostMover1 = new RandomGhostMover(mainUI.getGame());
IController ghostMover2 = new RandomGhostMover(mainUI.getGame());
//Below we apply forced pointer comparison to check the setter.
mainUI.withGhostController(ghostMover1);
assertSame(ghostMover1, mainUI.getGhostController());
//We do a second set call to confirm the first one is gone.
mainUI.withGhostController(ghostMover2);
assertNotSame(ghostMover1, mainUI.getGhostController());
assertSame(ghostMover2, mainUI.getGhostController());
//The createUI function should not affect our current GhostController.
mainUI.createUI();
assertSame(ghostMover2, mainUI.getGhostController());
}
/**
* Test to see the createUI function fail,
* because no GhostController has been set.
* @throws FactoryException if the initialize or createUI functions fail.
*/
@Test
public void testCreateUIWithoutGhostController() throws FactoryException {
assumeTrue(MainUI.class.desiredAssertionStatus());
boolean gotException = false;
mainUI.initialize();
//Do not set a GhostController, this should cause an assertion error.
try {
mainUI.createUI();
}
catch (AssertionError ae) {
gotException = true;
}
assertTrue(gotException);
}
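	/**
	 * Test that the GhostController can no longer be replaced once createUI has been called.
	 * This should trigger an assertion error inside withGhostController.
	 * @throws FactoryException if the initialize or createUI functions fail.
	 */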
@Test
public void testChangeGhostControllerAfterUI() throws FactoryException {
assumeTrue(MainUI.class.desiredAssertionStatus());
mainUI.initialize();
mainUI.withGhostController(new RandomGhostMover(mainUI.getGame()));
mainUI.createUI();
boolean gotException = false;
//After creating the UI, the GhostController should not be allowed to be changed.
//This should cause an assertion error.
try {
mainUI.withGhostController(new RandomGhostMover(mainUI.getGame()));
}
catch (AssertionError ae) {
gotException = true;
}
assertTrue(gotException);
}
/**
* Test to see if the initializeNormalGame works properly
* and sets all necessary fields, while providing the game with a RandomGhostMover.
* @throws FactoryException if the initialize or createUI functions fail.
*/
@Test
public void testInitializeNormalGame() throws FactoryException {
mainUI.initializeNormalGame();
assertNotNull(mainUI.getGame());
assertTrue(mainUI.getGhostController() instanceof RandomGhostMover);
//Check for the existence of the UI.
assertNotNull(mainUI.eventHandler());
}
}
|
package com.doctor.esper.reference;
import java.math.BigDecimal;
import java.util.concurrent.TimeUnit;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.doctor.esper.common.CommonUpdateListener;
import com.doctor.esper.common.EsperUtil;
import com.doctor.esper.event.Person;
import com.doctor.esper.event.Withdrawal;
import com.espertech.esper.client.EPServiceProvider;
import com.espertech.esper.client.EPStatement;
/**
 * Chapter 4. Context and Context Partitions
 *
 * @author doctor
 *
 * @time 2015-06-02 4:03:38
 */
public class Chapter4CPartitionsContextandContextTest {
private static final Logger log = LoggerFactory.getLogger(Chapter4CPartitionsContextandContextTest.class);
private static final String config = "esper2015Config/esper-2015.esper.cfg.xml";
private EPServiceProvider epServiceProvider;
@Before
public void init() {
epServiceProvider = EsperUtil.esperConfig(config);
}
@After
public void close() {
epServiceProvider.destroy();
}
@Test
public void test() throws InterruptedException {
StringBuilder stringBuilder = new StringBuilder();
stringBuilder.append("select account,amount ")
.append("from Withdrawal.win:time(2.5 sec) ");
String epl = stringBuilder.toString();
EPStatement epStatement = epServiceProvider.getEPAdministrator().createEPL(epl);
epStatement.addListener(CommonUpdateListener::update);
Withdrawal withdrawal = new Withdrawal("doctor", BigDecimal.valueOf(123.50D));
TimeUnit.SECONDS.sleep(2);
epServiceProvider.getEPRuntime().sendEvent(withdrawal);
TimeUnit.SECONDS.sleep(3);
}
	/**
	 * 4.5. Output When Context Partition Ends
	 *
	 * Each context partition gets its own context id; the EPL below selects
	 * context.name and context.id so the subscriber can see which partition handled each event.
	 */
@Test
public void test_Output_When_Context_Partition_Ends() throws Throwable {
String context = "create context personBySexContext partition by age from Person ";
String epl = "context personBySexContext select *, context.name as contextName, context.id as contextId from Person.win:length(1) ";
epServiceProvider.getEPAdministrator().createEPL(context);
epServiceProvider.getEPAdministrator().createEPL(epl).setSubscriber(new PersonSubscriber());
TimeUnit.SECONDS.sleep(2);
Person person = new Person("doctor who", "doctor", "man", 2000);
epServiceProvider.getEPRuntime().sendEvent(person);
TimeUnit.SECONDS.sleep(3);
person = new Person("doctor who 2", "doctor", "man", 2000);
epServiceProvider.getEPRuntime().sendEvent(person);
TimeUnit.SECONDS.sleep(2);
person = new Person("doctor who 3", "doctor", "man", 3100);
epServiceProvider.getEPRuntime().sendEvent(person);
TimeUnit.SECONDS.sleep(2);
person = new Person("doctor who 5", "doctor", "man", 5100);
epServiceProvider.getEPRuntime().sendEvent(person);
// {new:{"age":2000,"firstName":"doctor","id":"85486e45-b45b-4442-8ad1-484b7fe8e40d","name":"doctor who","sex":"man"},contextName:personBySexContext,contextId:0}
// {new:{"age":2000,"firstName":"doctor","id":"900588d8-b84c-454d-820b-a5285a4ea0d7","name":"doctor who 2","sex":"man"},contextName:personBySexContext,contextId:0}
// {old:{"age":2000,"firstName":"doctor","id":"85486e45-b45b-4442-8ad1-484b7fe8e40d","name":"doctor who","sex":"man"},contextName:personBySexContext,contextId:0}
// {new:{"age":3100,"firstName":"doctor","id":"828fff0c-8576-4f66-96c4-8e9e9fd1b14e","name":"doctor who 3","sex":"man"},contextName:personBySexContext,contextId:1}
// {new:{"age":5100,"firstName":"doctor","id":"1fc60c94-6677-4439-a262-d1a024f1c5b6","name":"doctor who 5","sex":"man"},contextName:personBySexContext,contextId:2}
}
/**
* 4.2. Context Declaration
*/
@Test
public void test_4_2_Context_Declaration() {
String context = "create context personBySexContext partition by sex from Person ";
String window = "context personBySexContext create window PersonWindow.win:length(3) as Person ";
String epl = "insert into PersonWindow select *, context.name, context.id from Person.win:length(3) ";
String query = " select *, context.name , context.id from PersonWindow ";
epServiceProvider.getEPAdministrator().createEPL(context);
epServiceProvider.getEPAdministrator().createEPL(query).setSubscriber(new PersonSubscriber());
Person person = new Person("doctor who", "doctor", "man", 2000);
epServiceProvider.getEPRuntime().sendEvent(person);
}
public static class PersonSubscriber {
public void update(Person person, String contextName, long contextId) {
log.info("{new:{},contextName:{},contextId:{}}", person, contextName, contextId);
}
public void updateRStream(Person person, String contextName, long contextId) {
log.info("{old:{},contextName:{},contextId:{}}", person, contextName, contextId);
}
}
}
|
import View.BoatView;
import View.View;
public class App {
public static void main(String[] args) {
        // View implements BoatView; using the concrete type here avoids the immediate downcast.
        View view = new View();
        view.start();
}
}
|
package snake;
import java.awt.AWTException;
import java.awt.BasicStroke;
import java.awt.Color;
import java.awt.Font;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.Point;
import java.awt.Rectangle;
import java.awt.Robot;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.KeyEvent;
import java.awt.event.KeyListener;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import javax.imageio.ImageIO;
import javax.swing.JPanel;
import javax.swing.Timer;
import utilityClasses.*;
public class SnakePanel extends JPanel implements ActionListener, KeyListener {
private boolean startGame = true;
private boolean playing = false;
private boolean paused = false;
private boolean endGame = false;
private boolean nameEnter = false;
private boolean highScores = false;
private boolean autoPlay = false;
private ScoreInfo scores = new ScoreInfo("snake");
private String pName = "";
private Character letter;
private ArrayList<Point> snakeBody = new ArrayList<Point>();
private ArrayList<Color> snakeColor = new ArrayList<Color>();
private Color[] Colors = { Color.CYAN, Color.RED, Color.GREEN,
Color.YELLOW, Color.ORANGE, Color.WHITE };
private int bodySize = 10;
private Point head = new Point(250, 250);
// private int fruitX = 300;
// private int fruitY = 200;
private ArrayList<Integer> fruitX = new ArrayList<Integer>();
private ArrayList<Integer> fruitY = new ArrayList<Integer>();
// private Color fruitColor = Color.WHITE;
private ArrayList<Color> fruitColor = new ArrayList<Color>();
private int deltaX = 0;
private int deltaY = -bodySize;
private int prevLoseKey = KeyEvent.VK_DOWN;
private int upKey = KeyEvent.VK_UP;
private int downKey = KeyEvent.VK_DOWN;
private int leftKey = KeyEvent.VK_LEFT;
private int rightKey = KeyEvent.VK_RIGHT;
private int[] keyMap = {KeyEvent.VK_UP, KeyEvent.VK_RIGHT, KeyEvent.VK_DOWN, KeyEvent.VK_LEFT};
private int keyIndex = 0;
private Timer timer;
private int origSpeed = 10;
private double speed = origSpeed;
private int score = 0;
public SnakePanel() {
		// for (Point x : snakeBody) {
		//     System.out.print(x.x + " " + x.y);
		//     System.out.println();
		// }
setBackground(Color.BLACK);
setFocusable(true);
addKeyListener(this);
randFruitSetup();
timer = new Timer((int) (1000 / speed), this);
resetBody();
timer.start();
}
public void actionPerformed(ActionEvent e) {
moves();
}
public void moves() {
if (playing) {
head.x += deltaX;
head.y += deltaY;
			// walk the body from tail to head, shifting each segment forward and checking for self-collision
			for (int i = snakeBody.size() - 1; i > 0; i--) {
if (head.x == snakeBody.get(i).x
&& head.y == snakeBody.get(i).y) {
playing = false;
nameEnter = true;
}
snakeBody.set(i, snakeBody.get(i - 1));
}
snakeBody.set(0, new Point(head.x, head.y));
int nextHeadX = head.x + deltaX;
int nextHeadY = head.y + deltaY;
for (int i = 0; i < fruitX.size(); i++) {
int fx = fruitX.get(i);
int fy = fruitY.get(i);
if (Math.abs(head.x - fx) < 5 && Math.abs(head.y - fy) < 5) {
addBodySquare(i);
}
}
if (autoPlay) {
autonomous();
}
if (head.x < 1 || head.x > 485 || head.y < 8 || head.y > 465) {
playing = false;
nameEnter = true;
}
}
repaint();
}
public void autonomous() {
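		// Simple greedy autopilot: steer away from the walls first, then move along one axis
		// at a time toward the first fruit in the list, turning once the head lines up with it.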
int nextHeadX = head.x + deltaX;
int nextHeadY = head.y + deltaY;
// if (Math.abs(head.x - fruitX) < 5 || Math.abs(head.y - fruitY) < 5) {
if ((head.x < 1 + bodySize || head.x > 485 - bodySize) && deltaX != 0) {
deltaX = 0;
deltaY = (head.y - fruitY.get(0) > 0) ? -bodySize : bodySize;
}
if ((head.y < 8 + bodySize || head.y > 465 - bodySize) && deltaY != 0) {
deltaY = 0;
deltaX = (head.x - fruitX.get(0) > 0) ? -bodySize : bodySize;
}
// if ((head.y < 8 + bodySize || head.y > 465 - bodySize) && (head.x < 1 + bodySize || head.x > 485 - bodySize)) {
if (Math.abs(head.x - fruitX.get(0)) < 5) {
deltaX = 0;
deltaY = (head.y - fruitY.get(0) > 0) ? -bodySize : bodySize;
// deltaY = (head.y - fruitY == 0) ? deltaY : (head.y - fruitY > 0)
// ? -bodySize : bodySize;
// addBodySquare();
}
if (Math.abs(head.y - fruitY.get(0)) < 5) {
deltaY = 0;
deltaX = (head.x - fruitX.get(0) > 0) ? -bodySize : bodySize;
// deltaX = (head.x - fruitX == 0) ? deltaX : (head.x - fruitX > 0)
// ? -bodySize : bodySize;
// addBodySquare();
}
}
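// Called when the head reaches fruit 'fruitIndex': grows the tail by one segment in
// the direction the tail is already travelling, re-rolls the eaten fruit's position
// and color, and speeds the game up by shortening the timer delay.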
public void addBodySquare(int fruitIndex) {
int lastBodyX = snakeBody.get(snakeBody.size() - 1).x;
int lastBodyY = snakeBody.get(snakeBody.size() - 1).y;
int secondLastBodyX = snakeBody.get(snakeBody.size() - 2).x;
int secondLastBodyY = snakeBody.get(snakeBody.size() - 2).y;
int changeX = lastBodyX - secondLastBodyX;
int changeY = lastBodyY - secondLastBodyY;
snakeBody.add(new Point(lastBodyX + changeX, lastBodyY + changeY));
snakeColor.add(fruitColor.get(fruitIndex));
fruitX.set(fruitIndex, randNum());
fruitY.set(fruitIndex, randNum());
fruitColor.set(fruitIndex, randColor());
speed += .5;
// System.out.println(speed);
// System.out.println((int) (1000.0 / speed));
timer.setDelay((int) (1000.0 / speed));
score++;
}
public void resetBody() {
snakeBody.clear();
snakeColor.clear();
snakeBody.add(new Point(250, 250));
snakeBody.add(new Point(250, 260));
snakeBody.add(new Point(250, 270));
snakeBody.add(new Point(250, 280));
for (int i = 0; i < snakeBody.size(); i++) {
// Whoop
snakeColor.add(randColor());
}
head.x = 250;
head.y = 250;
deltaY = -bodySize;
deltaX = 0;
prevLoseKey = KeyEvent.VK_DOWN;
speed = origSpeed;
timer.setDelay((int) (1000.0 / speed));
// timer = new Timer((int) (1000 / speed), this);
// timer.start();
}
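// Applies the key bindings captured on the title screen (arrow keys by default);
// keyMap is filled in keyPressed() while the start screen is showing.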
public void setKeys() {
upKey = keyMap[0];
rightKey = keyMap[1];
downKey = keyMap[2];
leftKey = keyMap[3];
}
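// Random grid-aligned coordinate: a multiple of 10 between 10 and 450, so fruit
// always lands on the same 10px grid the snake moves on.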
public int randNum() {
return ((int) (Math.random() * 45)) * 10 + 10;
}
public Color randColor() {
return Colors[(int) (Math.random() * Colors.length)];
}
public void randFruitSetup() {
for (int i = 0; i < 5; i++) {
fruitX.add(randNum());
fruitY.add(randNum());
fruitColor.add(randColor());
}
}
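// Renders whichever screen the state flags select: title, gameplay/pause,
// game over, name entry, or the high-score table.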
public void paintComponent(Graphics g) {
super.paintComponent(g);
g.setColor(Color.WHITE);
Graphics2D g2 = (Graphics2D) g;
g2.setStroke(new BasicStroke(15));
g2.drawRect(0, 0, 499, 477);
g2.setStroke(new BasicStroke(2));
if (startGame) {
g.setFont(new Font("Joystix", Font.BOLD, 80));
CenteredText title1 = new CenteredText("SNAKE!!", 500, 500, g,
true, 180);
drawColorOptions(g, 415);
g.setFont(new Font("Joystix", Font.BOLD, 20));
CenteredText start1 = new CenteredText("Press Enter to", 500, 500,
g, true, 300);
CenteredText start2 = new CenteredText("Start", 500, 500, g, true,
330);
g.setFont(new Font("Joystix", Font.BOLD, 12));
CenteredText keyMapInstruct = new CenteredText("Press keys Up, Right, Down, Left to map new keys", 500, 500, g, true, 30);
} else if (playing || paused) {
g.setFont(new Font("Joystix", Font.BOLD, 40));
g.setColor(Color.WHITE);
CenteredText score1 = new CenteredText(String.valueOf(score), 500,
500, g, true, 450);
int i = 0;
for (Point body : snakeBody) {
g.setColor(snakeColor.get(i));
i++;
g.fillRect(body.x, body.y, bodySize, bodySize);
g.setColor(Color.BLACK);
g.drawRect(body.x, body.y, bodySize, bodySize);
}
// Draw the fruit in a separate loop with its own index so the body-color
// index above is not clobbered.
for (int f = 0; f < fruitX.size(); f++) {
int fx = fruitX.get(f);
int fy = fruitY.get(f);
g.drawRect(fx, fy, bodySize, bodySize);
g.setColor(fruitColor.get(f));
g.fillRect(fx + 1, fy + 1, bodySize - 2, bodySize - 2);
}
if (paused) {
g.setFont(new Font("Joystix", Font.BOLD, 60));
g.setColor(Color.WHITE);
CenteredText pause = new CenteredText("Paused", 500, 500, g,
true, 200);
drawColorOptions(g, 300);
}
} else if (endGame) {
g.setFont(new Font("Joystix", Font.BOLD, 40));
g.setColor(Color.WHITE);
CenteredText score1 = new CenteredText(String.valueOf(score), 500,
500, g, true, 450);
g.setFont(new Font("Joystix", Font.BOLD, 60));
CenteredText lose = new CenteredText("You Lose!", 500, 500, g,
true, 170);
g.setFont(new Font("Joystix", Font.BOLD, 26));
CenteredText restart = new CenteredText("Enter to Restart", 500,
500, g, true, 320);
} else if (nameEnter) {
scores.enterName(g, 500, 500, snakeBody.size(), pName);
} else if (highScores) {
// scores.setScores(timeSeconds, pName);
scores.drawScores(g);
}
}
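// Handles all input: key remapping on the title screen, direction changes (with
// prevLoseKey guarding against reversing straight into the body), Enter to advance
// between screens, Space to pause, M to toggle the autopilot, and R/G/B/Y/O/W to
// recolor the fruit.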
@Override
public void keyPressed(KeyEvent e) {
// TODO Auto-generated method stub
if (startGame && e.getKeyCode() != KeyEvent.VK_ENTER) {
keyMap[keyIndex] = e.getKeyCode();
keyIndex++;
if (keyIndex > 3) keyIndex = 0;
} else if (e.getKeyCode() == prevLoseKey) {
// playing = false;
// nameEnter = true;
} else if (e.getKeyCode() == upKey) {
deltaX = 0;
deltaY = -bodySize;
prevLoseKey = downKey;
} else if (e.getKeyCode() == downKey) {
deltaX = 0;
deltaY = bodySize;
prevLoseKey = upKey;
} else if (e.getKeyCode() == leftKey) {
deltaY = 0;
deltaX = -bodySize;
prevLoseKey = rightKey;
} else if (e.getKeyCode() == rightKey) {
deltaY = 0;
deltaX = bodySize;
prevLoseKey = leftKey;
} else if (e.getKeyCode() == KeyEvent.VK_ENTER) {
if (startGame) {
playing = true;
startGame = false;
setKeys();
} else if (endGame) {
resetBody();
startGame = false;
playing = true;
nameEnter = false;
highScores = false;
endGame = false;
pName = "";
fruitX.clear();
fruitY.clear();
fruitColor.clear();
randFruitSetup();
speed = 10;
score = 0;
} else if (nameEnter) {
nameEnter = false;
highScores = true;
scores.setScores(score, pName);
} else if (highScores) {
highScores = false;
endGame = true;
} else {
startGame = false;
playing = true;
}
} else if (e.getKeyCode() == KeyEvent.VK_SPACE && !nameEnter) {
playing = !playing;
paused = !paused;
} else if (e.getKeyLocation() == KeyEvent.KEY_LOCATION_STANDARD
&& nameEnter) {
if (pName.length() < 10) {
letter = e.getKeyChar();
letter = Character.toUpperCase(letter);
pName = pName.concat(letter.toString());
}
} else if (e.getKeyCode() == KeyEvent.VK_M && playing) {
autoPlay = !autoPlay;
} else {
// if (startGame || endGame) {
// Recolor every fruit currently on the board to the chosen color.
switch (e.getKeyCode()) {
case KeyEvent.VK_R:
Collections.fill(fruitColor, Color.RED);
break;
case KeyEvent.VK_G:
Collections.fill(fruitColor, Color.GREEN);
break;
case KeyEvent.VK_B:
Collections.fill(fruitColor, Color.CYAN);
break;
case KeyEvent.VK_Y:
Collections.fill(fruitColor, Color.YELLOW);
break;
case KeyEvent.VK_O:
Collections.fill(fruitColor, Color.ORANGE);
break;
case KeyEvent.VK_W: // white is the default fruit color
Collections.fill(fruitColor, Color.WHITE);
break;
}
}
}
public void drawColorOptions(Graphics g, int colorY) {
g.setFont(new Font(Font.DIALOG, Font.BOLD, 45));
g.setColor(Color.RED);
g.drawString("R", 50, colorY);
g.setColor(Color.GREEN);
g.drawString("G", 140, colorY);
g.setColor(Color.CYAN);
g.drawString("B", 230, colorY);
g.setColor(Color.YELLOW);
g.drawString("Y", 315, colorY);
g.setColor(Color.ORANGE);
g.drawString("O", 410, colorY);
// g.setColor(Color.BLACK);
// g.drawString("W", 500, colorY);
g.setColor(Color.WHITE);
}
@Override
public void keyReleased(KeyEvent e) {
// TODO Auto-generated method stub
}
@Override
public void keyTyped(KeyEvent e) {
// TODO Auto-generated method stub
}
}
|
package hudson;
import hudson.maven.ExecutedMojo;
import hudson.model.AbstractProject;
import hudson.model.Action;
import hudson.model.Descriptor;
import hudson.model.Hudson;
import hudson.model.Item;
import hudson.model.ItemGroup;
import hudson.model.Items;
import hudson.model.Job;
import hudson.model.JobPropertyDescriptor;
import hudson.model.ModelObject;
import hudson.model.Node;
import hudson.model.Project;
import hudson.model.Run;
import hudson.model.TopLevelItem;
import hudson.model.View;
import hudson.search.SearchableModelObject;
import hudson.security.AccessControlled;
import hudson.security.AuthorizationStrategy;
import hudson.security.Permission;
import hudson.security.SecurityRealm;
import hudson.slaves.ComputerLauncher;
import hudson.slaves.RetentionStrategy;
import hudson.tasks.BuildStep;
import hudson.tasks.BuildStepDescriptor;
import hudson.tasks.BuildWrapper;
import hudson.tasks.BuildWrappers;
import hudson.tasks.Builder;
import hudson.tasks.Publisher;
import hudson.util.Area;
import org.acegisecurity.providers.anonymous.AnonymousAuthenticationToken;
import org.apache.commons.jelly.JellyContext;
import org.apache.commons.jexl.parser.ASTSizeFunction;
import org.apache.commons.jexl.util.Introspector;
import org.kohsuke.stapler.Ancestor;
import org.kohsuke.stapler.Stapler;
import org.kohsuke.stapler.StaplerRequest;
import org.kohsuke.stapler.StaplerResponse;
import javax.servlet.ServletException;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.File;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.lang.management.LockInfo;
import java.lang.management.ManagementFactory;
import java.lang.management.MonitorInfo;
import java.lang.management.ThreadInfo;
import java.lang.management.ThreadMXBean;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Calendar;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.logging.LogRecord;
import java.util.logging.SimpleFormatter;
import java.util.regex.Pattern;
/**
* Utility functions used in views.
*
* <p>
* An instance of this class is created for each request and made accessible
* from view pages via the variable 'h' (h stands for Hudson.)
*
* @author Kohsuke Kawaguchi
*/
public class Functions {
private static volatile int globalIota = 0;
private int iota;
public Functions() {
iota = globalIota;
// prevent the same user from seeing the same ID repeatedly.
globalIota+=1000;
}
/**
* Generates a unique ID.
*/
public String generateId() {
return "id"+iota++;
}
public static boolean isModel(Object o) {
return o instanceof ModelObject;
}
public static String xsDate(Calendar cal) {
return Util.XS_DATETIME_FORMATTER.format(cal.getTime());
}
public static String rfc822Date(Calendar cal) {
return Util.RFC822_DATETIME_FORMATTER.format(cal.getTime());
}
/**
* Prints the integer as a string that represents difference,
* like "-5", "+/-0", "+3".
*/
public static String getDiffString(int i) {
if(i==0) return "\u00B10";
String s = Integer.toString(i);
if(i>0) return "+"+s;
else return s;
}
/**
* {@link #getDiffString(int)} that doesn't show anything for +/-0
*/
public static String getDiffString2(int i) {
if(i==0) return "";
String s = Integer.toString(i);
if(i>0) return "+"+s;
else return s;
}
/**
* {@link #getDiffString2(int)} that puts the result into prefix and suffix
* if there's something to print
*/
public static String getDiffString2(String prefix, int i, String suffix) {
if(i==0) return "";
String s = Integer.toString(i);
if(i>0) return prefix+"+"+s+suffix;
else return prefix+s+suffix;
}
/**
* Adds the proper suffix.
*/
public static String addSuffix(int n, String singular, String plural) {
StringBuffer buf = new StringBuffer();
buf.append(n).append(' ');
if(n==1)
buf.append(singular);
else
buf.append(plural);
return buf.toString();
}
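/**
* Splits the current request URL around the {@link Run} ancestor, so that
* {@link RunUrl} can rebuild the same page URL for a neighbouring build.
*/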
public static RunUrl decompose(StaplerRequest req) {
List<Ancestor> ancestors = req.getAncestors();
// find the first and last Run instances
Ancestor f=null,l=null;
for (Ancestor anc : ancestors) {
if(anc.getObject() instanceof Run) {
if(f==null) f=anc;
l=anc;
}
}
if(l==null) return null; // there was no Run object
String head = f.getPrev().getUrl()+'/';
String base = l.getUrl();
String reqUri = req.getOriginalRequestURI();
// despite the spec saying this string is not decoded,
// Tomcat apparently decodes this string. You see ' ' instead of '%20', which is what
// the browser has sent. So do some quick scan to see if it's ASCII safe, and if not
// re-encode it. Otherwise it won't match with ancUrl.
if(reqUri.indexOf(' ')>=0) {
try {
reqUri = new URI(null,reqUri,null).toASCIIString();
} catch (URISyntaxException e) {
// try to use reqUri as is.
}
}
String rest = reqUri.substring(f.getUrl().length());
return new RunUrl( (Run) f.getObject(), head, base, rest);
}
/**
* If we know the user's screen resolution, return it. Otherwise null.
* @since 1.213
*/
public static Area getScreenResolution() {
Cookie res = Functions.getCookie(Stapler.getCurrentRequest(),"screenResolution");
if(res!=null)
return Area.parse(res.getValue());
return null;
}
public static final class RunUrl {
private final String head, base, rest;
private final Run run;
public RunUrl(Run run, String head, String base, String rest) {
this.run = run;
this.head = head;
this.base = base;
this.rest = rest;
}
public String getBaseUrl() {
return base;
}
/**
* Returns the same page in the next build.
*/
public String getNextBuildUrl() {
return getUrl(run.getNextBuild());
}
/**
* Returns the same page in the previous build.
*/
public String getPreviousBuildUrl() {
return getUrl(run.getPreviousBuild());
}
private String getUrl(Run n) {
if(n ==null)
return null;
else {
return head+n.getNumber()+rest;
}
}
}
public static Node.Mode[] getNodeModes() {
return Node.Mode.values();
}
public static String getProjectListString(List<Project> projects) {
return Items.toNameList(projects);
}
public static Object ifThenElse(boolean cond, Object thenValue, Object elseValue) {
return cond ? thenValue : elseValue;
}
public static String appendIfNotNull(String text, String suffix, String nullText) {
return text == null ? nullText : text + suffix;
}
public static Map getSystemProperties() {
return new TreeMap<Object,Object>(System.getProperties());
}
public static Map getEnvVars() {
return new TreeMap<String,String>(EnvVars.masterEnvVars);
}
public static boolean isWindows() {
return File.pathSeparatorChar==';';
}
public static List<LogRecord> getLogRecords() {
return Hudson.logRecords;
}
public static String printLogRecord(LogRecord r) {
return formatter.format(r);
}
public static Cookie getCookie(HttpServletRequest req,String name) {
Cookie[] cookies = req.getCookies();
if(cookies!=null) {
for (Cookie cookie : cookies) {
if(cookie.getName().equals(name)) {
return cookie;
}
}
}
return null;
}
public static String getCookie(HttpServletRequest req,String name, String defaultValue) {
Cookie c = getCookie(req, name);
if(c==null || c.getValue()==null) return defaultValue;
return c.getValue();
}
/**
* Gets the suffix to use for YUI JavaScript.
*/
public static String getYuiSuffix() {
return DEBUG_YUI ? "debug" : "min";
}
/**
* Set to true if you need to use the debug version of YUI.
*/
public static boolean DEBUG_YUI = System.getProperty("debug.YUI")!=null;
/**
* Creates a sub map by using the given range (both ends inclusive).
*/
public static <V> SortedMap<Integer,V> filter(SortedMap<Integer,V> map, String from, String to) {
if(from==null && to==null) return map;
if(to==null)
return map.headMap(Integer.parseInt(from)-1);
if(from==null)
return map.tailMap(Integer.parseInt(to));
return map.subMap(Integer.parseInt(to),Integer.parseInt(from)-1);
}
private static final SimpleFormatter formatter = new SimpleFormatter();
/**
* Used by <tt>layout.jelly</tt> to control the auto refresh behavior.
*
* @param noAutoRefresh
*      Certain pages, such as pages with forms, suffer annoying interference
*      from auto refresh. On those pages, pass true to disable auto-refresh.
*/
public static void configureAutoRefresh(HttpServletRequest request, HttpServletResponse response, boolean noAutoRefresh) {
if(noAutoRefresh)
return;
String param = request.getParameter("auto_refresh");
boolean refresh = isAutoRefresh(request);
if (param != null) {
refresh = Boolean.parseBoolean(param);
Cookie c = new Cookie("hudson_auto_refresh", Boolean.toString(refresh));
// Need to set path or it will not stick from e.g. a project page to the dashboard.
// Using request.getContextPath() might work but it seems simpler to just use the hudson_ prefix
// to avoid conflicts with any other web apps that might be on the same machine.
c.setPath("/");
c.setMaxAge(60*60*24*30); // persist it roughly for a month
response.addCookie(c);
}
if (refresh) {
response.addHeader("Refresh", "10");
}
}
public static boolean isAutoRefresh(HttpServletRequest request) {
String param = request.getParameter("auto_refresh");
if (param != null) {
return Boolean.parseBoolean(param);
}
Cookie[] cookies = request.getCookies();
if(cookies==null)
return false; // when API design messes it up, we all suffer
for (Cookie c : cookies) {
if (c.getName().equals("hudson_auto_refresh")) {
return Boolean.parseBoolean(c.getValue());
}
}
return false;
}
/**
* Finds the given object in the ancestor list and returns its URL.
* This is used to determine the "current" URL assigned to the given object,
* so that one can compute relative URLs from it.
*/
public static String getNearestAncestorUrl(StaplerRequest req,Object it) {
List list = req.getAncestors();
for( int i=list.size()-1; i>=0; i-- ) {
Ancestor anc = (Ancestor) list.get(i);
if(anc.getObject()==it)
return anc.getUrl();
}
return null;
}
/**
* Finds the inner-most {@link SearchableModelObject} in scope.
*/
public static String getSearchURL() {
List list = Stapler.getCurrentRequest().getAncestors();
for( int i=list.size()-1; i>=0; i-- ) {
Ancestor anc = (Ancestor) list.get(i);
if(anc.getObject() instanceof SearchableModelObject)
return anc.getUrl()+"/search/";
}
return null;
}
public static String appendSpaceIfNotNull(String n) {
if(n==null) return null;
else return n+' ';
}
public static String getWin32ErrorMessage(IOException e) {
return Util.getWin32ErrorMessage(e);
}
public static boolean isMultiline(String s) {
if(s==null) return false;
return s.indexOf('\r')>=0 || s.indexOf('\n')>=0;
}
public static String encode(String s) {
return Util.encode(s);
}
public static String escape(String s) {
return Util.escape(s);
}
public static String xmlEscape(String s) {
return Util.xmlEscape(s);
}
public static void checkPermission(Permission permission) throws IOException, ServletException {
checkPermission(Hudson.getInstance(),permission);
}
public static void checkPermission(AccessControlled object, Permission permission) throws IOException, ServletException {
if (permission != null) {
object.checkPermission(permission);
}
}
public static boolean hasPermission(Permission permission) throws IOException, ServletException {
return hasPermission(Hudson.getInstance(),permission);
}
public static boolean hasPermission(AccessControlled object, Permission permission) throws IOException, ServletException {
return permission==null || object.hasPermission(permission);
}
public static void adminCheck(StaplerRequest req, StaplerResponse rsp, Object required, Permission permission) throws IOException, ServletException {
if(required!=null && !Hudson.adminCheck(req,rsp)) {
// check failed. commit the FORBIDDEN response, then abort.
rsp.setStatus(HttpServletResponse.SC_FORBIDDEN);
rsp.getOutputStream().close();
throw new ServletException("Unauthorized access");
}
if(permission!=null)
checkPermission(permission);
}
/**
* Infers the hudson installation URL from the given request.
*/
public static String inferHudsonURL(StaplerRequest req) {
String rootUrl = Hudson.getInstance().getRootUrl();
if(rootUrl !=null)
// prefer the one explicitly configured, to work with load-balancer, frontend, etc.
return rootUrl;
StringBuilder buf = new StringBuilder();
buf.append(req.getScheme()).append("://");
buf.append(req.getServerName());
if(req.getLocalPort()!=80)
buf.append(':').append(req.getLocalPort());
buf.append(req.getContextPath()).append('/');
return buf.toString();
}
public static List<JobPropertyDescriptor> getJobPropertyDescriptors(Class<? extends Job> clazz) {
return JobPropertyDescriptor.getPropertyDescriptors(clazz);
}
public static List<Descriptor<BuildWrapper>> getBuildWrapperDescriptors(AbstractProject<?,?> project) {
return BuildWrappers.getFor(project);
}
public static List<Descriptor<SecurityRealm>> getSecurityRealmDescriptors() {
return SecurityRealm.LIST;
}
public static List<Descriptor<AuthorizationStrategy>> getAuthorizationStrategyDescriptors() {
return AuthorizationStrategy.LIST;
}
public static List<Descriptor<Builder>> getBuilderDescriptors(AbstractProject<?,?> project) {
return BuildStepDescriptor.filter(BuildStep.BUILDERS, project.getClass());
}
public static List<Descriptor<Publisher>> getPublisherDescriptors(AbstractProject<?,?> project) {
return BuildStepDescriptor.filter(BuildStep.PUBLISHERS, project.getClass());
}
public static List<Descriptor<ComputerLauncher>> getComputerLauncherDescriptors() {
return ComputerLauncher.LIST;
}
public static List<Descriptor<RetentionStrategy<?>>> getRetentionStrategyDescriptors() {
return RetentionStrategy.LIST;
}
/**
* Computes the path to the icon of the given action
* from the context path.
*/
public static String getIconFilePath(Action a) {
String name = a.getIconFileName();
if(name.startsWith("/"))
return name.substring(1);
else
return "images/24x24/"+name;
}
/**
* Works like the JSTL built-in size(x) function,
* but handles null gracefully.
*/
public static int size2(Object o) throws Exception {
if(o==null) return 0;
return ASTSizeFunction.sizeOf(o,Introspector.getUberspect());
}
public static ExecutedMojo.Cache createExecutedMojoCache() {
return new ExecutedMojo.Cache();
}
/**
* Computes the relative path from the current page to the given item.
*/
public static String getRelativeLinkTo(Item p) {
Map<Object,String> ancestors = new HashMap<Object,String>();
View view=null;
StaplerRequest request = Stapler.getCurrentRequest();
for( Ancestor a : request.getAncestors() ) {
ancestors.put(a.getObject(),a.getRelativePath());
if(a.getObject() instanceof View)
view = (View) a.getObject();
}
String path = ancestors.get(p);
if(path!=null) return path;
Item i=p;
String url = "";
while(true) {
ItemGroup ig = i.getParent();
url = i.getShortUrl()+url;
if(ig==Hudson.getInstance()) {
assert i instanceof TopLevelItem;
if(view!=null && view.contains((TopLevelItem)i)) {
// if p and the current page belongs to the same view, then return a relative path
return ancestors.get(view)+'/'+url;
} else {
// otherwise return a path from the root Hudson
return request.getContextPath()+'/'+p.getUrl();
}
}
path = ancestors.get(ig);
if(path!=null) return path+'/'+url;
assert ig instanceof Item; // if not, ig must have been the Hudson instance
i = (Item) ig;
}
}
public static Map<Thread,StackTraceElement[]> dumpAllThreads() {
return Thread.getAllStackTraces();
}
public static ThreadInfo[] getThreadInfos() {
ThreadMXBean mbean = ManagementFactory.getThreadMXBean();
return mbean.getThreadInfo(mbean.getAllThreadIds(),mbean.isObjectMonitorUsageSupported(),mbean.isSynchronizerUsageSupported());
}
/**
* Are we running on JRE6 or above?
*/
public static boolean isMustangOrAbove() {
try {
System.console();
return true;
} catch(LinkageError e) {
return false;
}
}
// ThreadInfo.toString() truncates the stack trace to the first 8 frames, so we need our own version
public static String dumpThreadInfo(ThreadInfo ti) {
StringBuilder sb = new StringBuilder("\"" + ti.getThreadName() + "\"" +
" Id=" + ti.getThreadId() + " " +
ti.getThreadState());
if (ti.getLockName() != null) {
sb.append(" on " + ti.getLockName());
}
if (ti.getLockOwnerName() != null) {
sb.append(" owned by \"" + ti.getLockOwnerName() +
"\" Id=" + ti.getLockOwnerId());
}
if (ti.isSuspended()) {
sb.append(" (suspended)");
}
if (ti.isInNative()) {
sb.append(" (in native)");
}
sb.append('\n');
StackTraceElement[] stackTrace = ti.getStackTrace();
for (int i=0; i < stackTrace.length; i++) {
StackTraceElement ste = stackTrace[i];
sb.append("\tat " + ste.toString());
sb.append('\n');
if (i == 0 && ti.getLockInfo() != null) {
Thread.State ts = ti.getThreadState();
switch (ts) {
case BLOCKED:
sb.append("\t- blocked on " + ti.getLockInfo());
sb.append('\n');
break;
case WAITING:
sb.append("\t- waiting on " + ti.getLockInfo());
sb.append('\n');
break;
case TIMED_WAITING:
sb.append("\t- waiting on " + ti.getLockInfo());
sb.append('\n');
break;
default:
}
}
for (MonitorInfo mi : ti.getLockedMonitors()) {
if (mi.getLockedStackDepth() == i) {
sb.append("\t- locked " + mi);
sb.append('\n');
}
}
}
LockInfo[] locks = ti.getLockedSynchronizers();
if (locks.length > 0) {
sb.append("\n\tNumber of locked synchronizers = " + locks.length);
sb.append('\n');
for (LockInfo li : locks) {
sb.append("\t- " + li);
sb.append('\n');
}
}
sb.append('\n');
return sb.toString();
}
public static <T> Collection<T> emptyList() {
return Collections.emptyList();
}
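/**
* Escapes single quotes, double quotes and backslashes so the string can be
* embedded inside a JavaScript string literal.
*/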
public static String jsStringEscape(String s) {
StringBuilder buf = new StringBuilder();
for( int i=0; i<s.length(); i++ ) {
char ch = s.charAt(i);
switch(ch) {
case '\'':
buf.append("\\'");
break;
case '\\':
buf.append("\\\\");
break;
case '"':
buf.append("\\\"");
break;
default:
buf.append(ch);
}
}
return buf.toString();
}
public static String getVersion() {
return Hudson.VERSION;
}
/**
* Resource path prefix.
*/
public static String getResourcePath() {
return Hudson.RESOURCE_PATH;
}
public static String getViewResource(Object it, String path) {
Class clazz = it.getClass();
if(it instanceof Class)
clazz = (Class)it;
if(it instanceof Descriptor)
clazz = ((Descriptor)it).clazz;
StringBuilder buf = new StringBuilder(Stapler.getCurrentRequest().getContextPath());
buf.append(Hudson.VIEW_RESOURCE_PATH).append('/');
buf.append(clazz.getName().replace('.','/').replace('$','/'));
buf.append('/').append(path);
return buf.toString();
}
/**
* Can be used to check a checkbox by default.
* Used from views like {@code h.defaultToTrue(scm.useUpdate)}.
* The expression will evaluate to true if scm is null.
*/
public static boolean defaultToTrue(Boolean b) {
if(b==null) return true;
return b;
}
/**
* If the value exists, return that value. Otherwise return the default value.
* <p>
* This method is useful for supplying a default value to a form field.
*
* @since 1.150
*/
public static <T> T defaulted(T value, T defaultValue) {
return value!=null ? value : defaultValue;
}
public static String printThrowable(Throwable t) {
StringWriter sw = new StringWriter();
t.printStackTrace(new PrintWriter(sw));
return sw.toString();
}
/**
* Counts the number of rows needed for textarea to fit the content.
* Minimum 5 rows.
*/
public static int determineRows(String s) {
if(s==null) return 5;
return Math.max(5,LINE_END.split(s).length);
}
/**
* Converts the Hudson build status to CruiseControl build status,
* which is either Success, Failure, Exception, or Unknown.
*/
public static String toCCStatus(Item i) {
if (i instanceof Job) {
Job j = (Job) i;
switch (j.getIconColor().noAnime()) {
case ABORTED:
case RED:
case YELLOW:
return "Failure";
case BLUE:
return "Success";
case DISABLED:
case GREY:
return "Unknown";
}
}
return "Unknown";
}
private static final Pattern LINE_END = Pattern.compile("\r?\n");
/**
* Checks if the current user is anonymous.
*/
public static boolean isAnonymous() {
return Hudson.getAuthentication() instanceof AnonymousAuthenticationToken;
}
/**
* When called from within JEXL expression evaluation,
* this method returns the current {@link JellyContext} used
* to evaluate the script.
*
* @since 1.164
*/
public static JellyContext getCurrentJellyContext() {
JellyContext context = ExpressionFactory2.CURRENT_CONTEXT.get();
assert context!=null;
return context;
}
/**
* Returns a sub-list if the given list is bigger than the specified 'maxSize'
*/
public static <T> List<T> subList(List<T> base, int maxSize) {
if(maxSize<base.size())
return base.subList(0,maxSize);
else
return base;
}
/**
* Computes the hyperlink to actions, to handle the situation when the {@link Action#getUrlName()}
* returns absolute URL.
*/
public static String getActionUrl(String itUrl,Action action) {
String urlName = action.getUrlName();
if(SCHEME.matcher(urlName).matches())
return urlName; // absolute URL
else
// relative URL name
return Stapler.getCurrentRequest().getContextPath()+'/'+itUrl+urlName;
}
public static String toEmailSafeString(String projectName) {
// TODO: escape non-ASCII characters
StringBuilder buf = new StringBuilder(projectName.length());
for( int i=0; i<projectName.length(); i++ ) {
char ch = projectName.charAt(i);
if(('a'<=ch && ch<='z')
|| ('A'<=ch && ch<='Z')
|| ('0'<=ch && ch<='9')
|| "-_.".indexOf(ch)>=0)
buf.append(ch);
else
buf.append('_'); // escape
}
return buf.toString();
}
public String getSystemProperty(String key) {
return System.getProperty(key);
}
private static final Pattern SCHEME = Pattern.compile("[a-z]+://.+");
}
|
package io.spine.server.tenant;
import com.google.common.base.Function;
import io.spine.annotation.Internal;
import io.spine.core.TenantId;
import org.checkerframework.checker.nullness.qual.Nullable;
/**
* A function which can work in single-tenant and multi-tenant context and return a
* value depending on the current tenant set.
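*
* <p>A minimal usage sketch (hypothetical caller code, not part of this class; it relies
* only on the constructor and {@link #execute()} shown below):
* <pre>{@code
* String tenantName = new TenantFunction<String>(true) {
*     public String apply(@Nullable TenantId id) {
*         return String.valueOf(id);
*     }
* }.execute();
* }</pre>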
*
* @param <T> the type of the result returned by the function
* @author Alexander Yevsyukov
*/
@Internal
public abstract class TenantFunction<T> extends TenantAware implements Function<TenantId, T> {
/**
* Creates a new instance of the function.
*
* @param multitenant if {@code true} the function is executed in the multi-tenant context,
* {@code false} for single-tenant context
*/
protected TenantFunction(boolean multitenant) {
super(TenantAware.getCurrentTenant(multitenant));
}
/**
* Applies the function and returns the result.
*
* @return the result of the function
*/
public @Nullable T execute() {
final TenantId currentTenant = tenantId();
final T result = apply(currentTenant);
return result;
}
}
|
package hex.deeplearning;
import hex.*;
import hex.quantile.Quantile;
import hex.quantile.QuantileModel;
import hex.schemas.DeepLearningModelV2;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
import water.*;
import water.api.ModelSchema;
import water.exceptions.H2OIllegalArgumentException;
import water.fvec.Chunk;
import water.fvec.Frame;
import water.fvec.Vec;
import water.util.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Random;
import static hex.ModelMetrics.calcVarImp;
import static java.lang.Double.isNaN;
/**
* The Deep Learning model
* It contains a DeepLearningModelInfo with the most up-to-date model,
* a scoring history, as well as some helpers to indicate the progress
*/
public class DeepLearningModel extends SupervisedModel<DeepLearningModel,DeepLearningModel.DeepLearningParameters,DeepLearningModel.DeepLearningModelOutput> implements Model.DeepFeatures {
public static class DeepLearningParameters extends SupervisedModel.SupervisedParameters {
// public int _n_folds;
public int getNumFolds() { return 0; }
public boolean _keep_cross_validation_splits;
/**
* A model key associated with a previously trained Deep Learning
* model. This option allows users to build a new model as a
* continuation of a previously generated model.
*/
public Key _checkpoint;
/**
* If enabled, store the best model under the destination key of this model at the end of training.
* Only applicable if training is not cancelled.
*/
public boolean _override_with_best_model = true;
public boolean _autoencoder = false;
public boolean _use_all_factor_levels = true;
/*Neural Net Topology*/
/**
* The activation function (non-linearity) to be used by the neurons in the hidden layers.
* Tanh: Hyperbolic tangent function (same as scaled and shifted sigmoid).
* Rectifier: Chooses the maximum of (0, x) where x is the input value.
* Maxout: Choose the maximum coordinate of the input vector.
* With Dropout: Zero out a random user-given fraction of the
* incoming weights to each hidden layer during training, for each
* training row. This effectively trains exponentially many models at
* once, and can improve generalization.
*/
public Activation _activation = Activation.Rectifier;
/**
* The number and size of each hidden layer in the model.
* For example, if a user specifies "100,200,100" a model with 3 hidden
* layers will be produced, and the middle hidden layer will have 200
* neurons.
*/
public int[] _hidden = new int[] { 200, 200 };
/**
* The number of passes over the training dataset to be carried out.
* It is recommended to start with lower values for initial experiments.
* This value can be modified during checkpoint restarts and allows continuation
* of selected models.
*/
public double _epochs = 10;
/**
* The number of training data rows to be processed per iteration. Note that
* independent of this parameter, each row is used immediately to update the model
* with (online) stochastic gradient descent. This parameter controls the
* synchronization period between nodes in a distributed environment and the
* frequency at which scoring and model cancellation can happen. For example, if
* it is set to 10,000 on H2O running on 4 nodes, then each node will
* process 2,500 rows per iteration, sampling randomly from their local data.
* Then, model averaging between the nodes takes place, and scoring can happen
* (dependent on scoring interval and duty factor). Special values are 0 for
* one epoch per iteration, -1 for processing the maximum amount of data
* per iteration (if **replicate training data** is enabled, N epochs
* will be trained per iteration on N nodes, otherwise one epoch). Special value
* of -2 turns on automatic mode (auto-tuning).
*/
public long _train_samples_per_iteration = -2;
public double _target_ratio_comm_to_comp = 0.02;
/**
* The random seed controls sampling and initialization. Reproducible
* results are only expected with single-threaded operation (i.e.,
* when running on one node, turning off load balancing and providing
* a small dataset that fits in one chunk). In general, the
* multi-threaded asynchronous updates to the model parameters will
* result in (intentional) race conditions and non-reproducible
* results. Note that deterministic sampling and initialization might
* still lead to some weak sense of determinism in the model.
*/
public long _seed = new Random().nextLong();
/*Adaptive Learning Rate*/
/**
* The implemented adaptive learning rate algorithm (ADADELTA) automatically
* combines the benefits of learning rate annealing and momentum
* training to avoid slow convergence. Specification of only two
* parameters (rho and epsilon) simplifies hyper parameter search.
* In some cases, manually controlled (non-adaptive) learning rate and
* momentum specifications can lead to better results, but require the
* specification (and hyper parameter search) of up to 7 parameters.
* If the model is built on a topology with many local minima or
* long plateaus, it is possible for a constant learning rate to produce
* sub-optimal results. Learning rate annealing allows digging deeper into
* local minima, while rate decay allows specification of different
* learning rates per layer. When the gradient is being estimated in
* a long valley in the optimization landscape, a large learning rate
* can cause the gradient to oscillate and move in the wrong
* direction. When the gradient is computed on a relatively flat
* surface with small learning rates, the model can converge far
* slower than necessary.
*/
public boolean _adaptive_rate = true;
/**
* The first of two hyper parameters for adaptive learning rate (ADADELTA).
* It is similar to momentum and relates to the memory to prior weight updates.
* Typical values are between 0.9 and 0.999.
* This parameter is only active if adaptive learning rate is enabled.
*/
public double _rho = 0.99;
/**
* The second of two hyper parameters for adaptive learning rate (ADADELTA).
* It is similar to learning rate annealing during initial training
* and momentum at later stages where it allows forward progress.
* Typical values are between 1e-10 and 1e-4.
* This parameter is only active if adaptive learning rate is enabled.
*/
public double _epsilon = 1e-8;
/*Learning Rate*/
/**
* When adaptive learning rate is disabled, the magnitude of the weight
* updates are determined by the user specified learning rate
* (potentially annealed), and are a function of the difference
* between the predicted value and the target value. That difference,
* generally called delta, is only available at the output layer. To
* correct the output at each hidden layer, back propagation is
* used. Momentum modifies back propagation by allowing prior
* iterations to influence the current update. Using the momentum
* parameter can aid in avoiding local minima and the associated
* instability. Too much momentum can lead to instabilities, that's
* why the momentum is best ramped up slowly.
* This parameter is only active if adaptive learning rate is disabled.
*/
public double _rate = .005;
/**
* Learning rate annealing reduces the learning rate to "freeze" into
* local minima in the optimization landscape. The annealing rate is the
* inverse of the number of training samples it takes to cut the learning rate in half
* (e.g., 1e-6 means that it takes 1e6 training samples to halve the learning rate).
* This parameter is only active if adaptive learning rate is disabled.
*/
public double _rate_annealing = 1e-6;
/**
* The learning rate decay parameter controls the change of learning rate across layers.
* For example, assume the rate parameter is set to 0.01, and the rate_decay parameter is set to 0.5.
* Then the learning rate for the weights connecting the input and first hidden layer will be 0.01,
* the learning rate for the weights connecting the first and the second hidden layer will be 0.005,
* and the learning rate for the weights connecting the second and third hidden layer will be 0.0025, etc.
* This parameter is only active if adaptive learning rate is disabled.
*/
public double _rate_decay = 1.0;
/*Momentum*/
/**
* The momentum_start parameter controls the amount of momentum at the beginning of training.
* This parameter is only active if adaptive learning rate is disabled.
*/
public double _momentum_start = 0;
/**
* The momentum_ramp parameter controls the amount of learning for which momentum increases
* (assuming momentum_stable is larger than momentum_start). The ramp is measured in the number
* of training samples.
* This parameter is only active if adaptive learning rate is disabled.
*/
public double _momentum_ramp = 1e6;
/**
* The momentum_stable parameter controls the final momentum value reached after momentum_ramp training samples.
* The momentum used for training will remain the same for training beyond reaching that point.
* This parameter is only active if adaptive learning rate is disabled.
*/
public double _momentum_stable = 0;
/**
* The Nesterov accelerated gradient descent method is a modification to
* traditional gradient descent for convex functions. The method relies on
* gradient information at various points to build a polynomial approximation that
* minimizes the residuals in fewer iterations of the descent.
* This parameter is only active if adaptive learning rate is disabled.
*/
public boolean _nesterov_accelerated_gradient = true;
/*Regularization*/
/**
* A fraction of the features for each training row to be omitted from training in order
* to improve generalization (dimension sampling).
*/
public double _input_dropout_ratio = 0.0;
/**
* A fraction of the inputs for each hidden layer to be omitted from training in order
* to improve generalization. Defaults to 0.5 for each hidden layer if omitted.
*/
public double[] _hidden_dropout_ratios;
/**
* A regularization method that constrains the absolute value of the weights and
* has the net effect of dropping some weights (setting them to zero) from a model
* to reduce complexity and avoid overfitting.
*/
public double _l1 = 0.0;
/**
* A regularization method that constrains the sum of the squared
* weights. This method introduces bias into parameter estimates, but
* frequently produces substantial gains in modeling as estimate variance is
* reduced.
*/
public double _l2 = 0.0;
/**
* A maximum on the sum of the squared incoming weights into
* any one neuron. This tuning parameter is especially useful for unbound
* activation functions such as Maxout or Rectifier.
*/
public float _max_w2 = Float.POSITIVE_INFINITY;
/*Initialization*/
/**
* The distribution from which initial weights are to be drawn. The default
* option is an optimized initialization that considers the size of the network.
* The "uniform" option uses a uniform distribution with a mean of 0 and a given
* interval. The "normal" option draws weights from the standard normal
* distribution with a mean of 0 and given standard deviation.
*/
public InitialWeightDistribution _initial_weight_distribution = InitialWeightDistribution.UniformAdaptive;
/**
* The scale of the distribution function for Uniform or Normal distributions.
* For Uniform, the values are drawn uniformly from -initial_weight_scale...initial_weight_scale.
* For Normal, the values are drawn from a Normal distribution with a standard deviation of initial_weight_scale.
*/
public double _initial_weight_scale = 1.0;
/**
* The loss (error) function to be minimized by the model.
* Cross Entropy loss is used when the model output consists of independent
* hypotheses, and the outputs can be interpreted as the probability that each
* hypothesis is true. Cross entropy is the recommended loss function when the
* target values are class labels, and especially for imbalanced data.
* It strongly penalizes error in the prediction of the actual class label.
* Mean Square loss is used when the model output are continuous real values, but can
* be used for classification as well (where it emphasizes the error on all
* output classes, not just for the actual class).
*/
public Loss _loss = Loss.Automatic;
/*Scoring*/
/**
* The minimum time (in seconds) to elapse between model scoring. The actual
* interval is determined by the number of training samples per iteration and the scoring duty cycle.
*/
public double _score_interval = 5;
/**
* The number of training dataset points to be used for scoring. Will be
* randomly sampled. Use 0 for selecting the entire training dataset.
*/
public long _score_training_samples = 10000L;
/**
* The number of validation dataset points to be used for scoring. Can be
* randomly sampled or stratified (if "balance classes" is set and "score
* validation sampling" is set to stratify). Use 0 for selecting the entire
* training dataset.
*/
public long _score_validation_samples = 0L;
/**
* Maximum fraction of wall clock time spent on model scoring on training and validation samples,
* and on diagnostics such as computation of feature importances (i.e., not on training).
*/
public double _score_duty_cycle = 0.1;
/**
* The stopping criteria in terms of classification error (1-accuracy) on the
* training data scoring dataset. When the error is at or below this threshold,
* training stops.
*/
public double _classification_stop = 0;
/**
* The stopping criteria in terms of regression error (MSE) on the training
* data scoring dataset. When the error is at or below this threshold, training
* stops.
*/
public double _regression_stop = 1e-6;
/**
* Enable quiet mode for less output to standard output.
*/
public boolean _quiet_mode = false;
/**
* Method used to sample the validation dataset for scoring, see Score Validation Samples above.
*/
public ClassSamplingMethod _score_validation_sampling = ClassSamplingMethod.Uniform;
/*Misc*/
/**
* Gather diagnostics for hidden layers, such as mean and RMS values of learning
* rate, momentum, weights and biases.
*/
public boolean _diagnostics = true;
/**
* Whether to compute variable importances for input features.
* The implemented method (by Gedeon) considers the weights connecting the
* input features to the first two hidden layers.
*/
public boolean _variable_importances = false;
/**
* Enable fast mode (minor approximation in back-propagation), should not affect results significantly.
*/
public boolean _fast_mode = true;
/**
* Ignore constant training columns (no information can be gained anyway).
*/
public boolean _ignore_const_cols = true;
/**
* Increase training speed on small datasets by splitting it into many chunks
* to allow utilization of all cores.
*/
public boolean _force_load_balance = true;
/**
* Replicate the entire training dataset onto every node for faster training on small datasets.
*/
public boolean _replicate_training_data = true;
/**
* Run on a single node for fine-tuning of model parameters. Can be useful for
* checkpoint resumes after training on multiple nodes for fast initial
* convergence.
*/
public boolean _single_node_mode = false;
/**
* Enable shuffling of training data (on each node). This option is
* recommended if training data is replicated on N nodes, and the number of training samples per iteration
* is close to N times the dataset size, where all nodes will train with (almost) all
* the data. It is automatically enabled if the number of training samples per iteration is set to -1 (or to N
* times the dataset size or larger).
*/
public boolean _shuffle_training_data = false;
public MissingValuesHandling _missing_values_handling = MissingValuesHandling.MeanImputation;
public boolean _sparse = false;
public boolean _col_major = false;
public double _average_activation = 0;
public double _sparsity_beta = 0;
/**
* Max. number of categorical features, enforced via hashing (Experimental)
*/
public int _max_categorical_features = Integer.MAX_VALUE;
/**
* Force reproducibility on small data (will be slow - only uses 1 thread)
*/
public boolean _reproducible = false;
public boolean _export_weights_and_biases = false;
public enum MissingValuesHandling {
Skip, MeanImputation
}
public enum ClassSamplingMethod {
Uniform, Stratified
}
public enum InitialWeightDistribution {
UniformAdaptive, Uniform, Normal
}
/**
* Activation functions
*/
public enum Activation {
Tanh, TanhWithDropout, Rectifier, RectifierWithDropout, Maxout, MaxoutWithDropout
}
/**
* Loss functions
* Absolute, MeanSquare, Huber for regression
* Absolute, MeanSquare, Huber or CrossEntropy for classification
*/
public enum Loss {
Automatic, MeanSquare, CrossEntropy, Huber, Absolute
}
void validate( DeepLearning dl, boolean expensive ) {
dl.hide("_score_each_iteration", "Not used by Deep Learning.");
boolean classification = expensive || dl._nclass != 0 ? dl.isClassifier() : _loss == Loss.CrossEntropy;
if (_hidden == null || _hidden.length == 0) dl.error("_hidden", "There must be at least one hidden layer.");
for( int h : _hidden ) if( h<=0 ) dl.error("_hidden", "Hidden layer size must be positive.");
if (!_autoencoder) {
if (_valid == null)
dl.hide("_score_validation_samples", "score_validation_samples requires a validation frame.");
if (classification) {
dl.hide("_regression_stop", "regression_stop is used only with regression.");
} else {
dl.hide("_classification_stop", "classification_stop is used only with classification.");
dl.hide("_max_confusion_matrix_size", "max_confusion_matrix_size is used only with classification.");
dl.hide("_max_hit_ratio_k", "max_hit_ratio_k is used only with classification.");
dl.hide("_balance_classes", "balance_classes is used only with classification.");
}
if( !classification || !_balance_classes )
dl.hide("_class_sampling_factors", "class_sampling_factors requires both classification and balance_classes.");
if (!classification && _valid != null || _valid == null)
dl.hide("_score_validation_sampling", "score_validation_sampling requires classification and a validation frame.");
}
// Auto-fill defaults
if (_activation != Activation.TanhWithDropout && _activation != Activation.MaxoutWithDropout && _activation != Activation.RectifierWithDropout)
dl.hide("_hidden_dropout_ratios", "hidden_dropout_ratios requires a dropout activation function.");
if (_hidden_dropout_ratios == null) {
if (_activation == Activation.TanhWithDropout || _activation == Activation.MaxoutWithDropout || _activation == Activation.RectifierWithDropout) {
if (expensive) {
_hidden_dropout_ratios = new double[_hidden.length];
dl.info("_hidden_dropout_ratios", "Automatically setting all hidden dropout ratios to 0.5.");
Arrays.fill(_hidden_dropout_ratios, 0.5);
}
}
}
else if (_hidden_dropout_ratios.length != _hidden.length) {
dl.error("_hidden_dropout_ratios", "Must have " + _hidden.length + " hidden layer dropout ratios.");
}
else if (_activation != Activation.TanhWithDropout && _activation != Activation.MaxoutWithDropout && _activation != Activation.RectifierWithDropout) {
if (!_quiet_mode) dl.hide("_hidden_dropout_ratios", "Ignoring hidden_dropout_ratios because a non-dropout activation function was specified.");
}
if (_input_dropout_ratio < 0 || _input_dropout_ratio >= 1)
dl.error("_input_dropout_ratio", "Input dropout must be >= 0 and <1.");
if (H2O.CLOUD.size() == 1 && _replicate_training_data) {
dl.hide("_replicate_training_data", "replicate_training_data is only valid with cloud size greater than 1.");
if (expensive) {
dl.info("_replicate_training_data", "Disabling replicate_training_data on 1 node.");
_replicate_training_data = false;
}
}
if (_single_node_mode && (H2O.CLOUD.size() == 1 || !_replicate_training_data)) {
dl.hide("_single_node_mode", "single_node_mode is only used with multi-node operation with replicated training data.");
if (expensive) {
dl.info("_single_node_mode", "Disabling single_node_mode (only for multi-node operation with replicated training data).");
_single_node_mode = false;
}
}
if (_autoencoder) {
dl.hide("_use_all_factor_levels", "use_all_factor_levels is mandatory in combination with autoencoder.");
}
if (!_use_all_factor_levels && _autoencoder ) {
if (expensive) {
dl.warn("_use_all_factor_levels", "Automatically enabling all_factor_levels for auto-encoders.");
_use_all_factor_levels = true;
}
}
if (getNumFolds() != 0)
dl.hide("_override_with_best_model", "override_with_best_model is unsupported in combination with n-fold cross-validation.");
if(_override_with_best_model && getNumFolds() != 0) {
if (expensive) {
dl.warn("_override_with_best_model", "Disabling override_with_best_model in combination with n-fold cross-validation.");
_override_with_best_model = false;
}
}
if (_adaptive_rate) {
dl.hide("_rate", "rate is not used with adaptive_rate.");
dl.hide("_rate_annealing", "rate_annealing is not used with adaptive_rate.");
dl.hide("_rate_decay", "rate_decay is not used with adaptive_rate.");
dl.hide("_momentum_start", "momentum_start is not used with adaptive_rate.");
dl.hide("_momentum_ramp", "momentum_ramp is not used with adaptive_rate.");
dl.hide("_momentum_stable", "momentum_stable is not used with adaptive_rate.");
dl.hide("_nesterov_accelerated_gradient", "nesterov_accelerated_gradient is not used with adaptive_rate.");
} else {
// ! adaptive_rate
dl.hide("_rho", "rho is only used with adaptive_rate.");
dl.hide("_epsilon", "epsilon is only used with adaptive_rate.");
}
if (_adaptive_rate) {
if (expensive) {
dl.info("_adaptive_rate", "Using automatic learning rate. Ignoring the following input parameters: "
+ "rate, rate_decay, rate_annealing, momentum_start, momentum_ramp, momentum_stable, nesterov_accelerated_gradient.");
_momentum_start = 0;
_momentum_stable = 0;
}
} else {
if (expensive) {
dl.info("_adaptive_rate", "Using manual learning rate. Ignoring the following input parameters: "
+ "rho, epsilon.");
_rho = 0;
_epsilon = 0;
}
}
if (_initial_weight_distribution == InitialWeightDistribution.UniformAdaptive) {
dl.hide("_initial_weight_scale", "initial_weight_scale is not used if initial_weight_distribution == UniformAdaptive.");
}
if (getNumFolds() != 0) {
dl.error("_n_folds", "n_folds is not yet implemented.");
if (expensive) {
if (_override_with_best_model) {
dl.warn("_override_with_best_model", "Automatically disabling override_with_best_model, since the final model is the only scored model with n-fold cross-validation.");
_override_with_best_model = false;
}
}
}
if (_loss == Loss.Automatic) {
if (expensive) {
_loss = (classification && !_autoencoder) ? Loss.CrossEntropy : Loss.MeanSquare;
dl.info("_loss", "Automatically setting loss function to " + _loss);
}
}
if (_loss == null) {
if (expensive || dl._nclass != 0) {
dl.error("_loss", "Loss function must be specified. Try CrossEntropy for categorical response (classification), MeanSquare, Absolute or Huber for numerical response (regression).");
}
//otherwise, we might not know whether classification=true or false (from R, for example, the training data isn't known when init(false) is called).
} else {
if (_autoencoder && _loss == Loss.CrossEntropy)
dl.error("_loss", "Cannot use CrossEntropy loss for auto-encoder.");
if (!classification && _loss == Loss.CrossEntropy)
dl.error("_loss", "For CrossEntropy loss, the response must be categorical.");
}
if (!classification && _loss == Loss.CrossEntropy)
dl.error("_loss", "For CrossEntropy loss, the response must be categorical. Either select MeanSquare, Absolute or Huber loss for regression, or use a categorical response.");
if (_score_training_samples < 0) {
dl.error("_score_training_samples", "Number of training samples for scoring must be >= 0 (0 for all).");
}
if (_score_validation_samples < 0) {
dl.error("_score_validation_samples", "Number of training samples for scoring must be >= 0 (0 for all).");
}
if(_autoencoder && _sparsity_beta > 0) {
if (_activation == Activation.Tanh || _activation == Activation.TanhWithDropout) {
if (_average_activation >= 1 || _average_activation <= -1)
dl.error("_average_activation", "Tanh average activation must be in (-1,1).");
}
else if (_activation == Activation.Rectifier || _activation == Activation.RectifierWithDropout) {
if (_average_activation <= 0)
dl.error("_average_activation", "Rectifier average activation must be positive.");
}
}
if (!_autoencoder && _sparsity_beta != 0) dl.info("_sparsity_beta", "Sparsity beta can only be used for autoencoder.");
// reason for the error message below is that validation might not have the same horizontalized features as the training data (or different order)
if (_autoencoder && _activation == Activation.Maxout) dl.error("_activation", "Maxout activation is not supported for auto-encoder.");
if (_max_categorical_features < 1) dl.error("_max_categorical_features", "max_categorical_features must be at least 1.");
if (!_sparse && _col_major) {
dl.error("_col_major", "Cannot use column major storage for non-sparse data handling.");
}
if (expensive) {
if (!classification && _balance_classes) {
dl.error("_balance_classes", "balance_classes requires classification.");
}
if (_class_sampling_factors != null && !_balance_classes) {
dl.error("_class_sampling_factors", "class_sampling_factors requires balance_classes to be enabled.");
}
}
if (_reproducible) {
if (expensive) {
dl.info("_reproducibility",
"Automatically enabling force_load_balancing, disabling single_node_mode and replicate_training_data\n"
+"and setting train_samples_per_iteration to -1 to enforce reproducibility.");
_force_load_balance = true;
_single_node_mode = false;
_train_samples_per_iteration = -1;
_replicate_training_data = false; //there's no benefit from having multiple nodes compute the exact same thing, and then average it back to the same
// replicate_training_data = true; //doesn't hurt, but does replicated identical work
}
}
}
}
public static class DeepLearningModelOutput extends SupervisedModel.SupervisedOutput {
@Override public int nfeatures() {
return _names.length - (autoencoder ? 0 : 1);
}
public DeepLearningModelOutput() { super(); }
public DeepLearningModelOutput(DeepLearning b) { super(b); }
boolean autoencoder;
Key[] weights;
Key[] biases;
DeepLearningScoring errors;
TwoDimTable model_summary;
TwoDimTable scoring_history;
TwoDimTable variable_importances;
double run_time;
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append(model_summary.toString());
sb.append(scoring_history.toString());
if (variable_importances != null) sb.append(variable_importances.toString());
return sb.toString();
}
@Override public ModelCategory getModelCategory() {
return autoencoder ? ModelCategory.AutoEncoder : super.getModelCategory();
}
@Override public boolean isSupervised() { return !autoencoder; }
}
// Default publicly visible Schema is V2
public ModelSchema schema() { return new DeepLearningModelV2(); }
private volatile DeepLearningModelInfo model_info;
void set_model_info(DeepLearningModelInfo mi) { model_info = mi; }
final public DeepLearningModelInfo model_info() { return model_info; }
public long run_time;
private long start_time;
public long actual_train_samples_per_iteration;
public double time_for_communication_us; //helper for auto-tuning: time in microseconds for collective bcast/reduce of the model
public double epoch_counter;
public long training_rows;
public long validation_rows;
private DeepLearningScoring[] errors;
public DeepLearningScoring[] scoring_history() { return errors; }
// Keep the best model so far, based on a single criterion (overall class. error or MSE)
private float _bestError = Float.POSITIVE_INFINITY;
public Key actual_best_model_key;
// return the most up-to-date model metrics
DeepLearningScoring last_scored() { return errors == null ? null : errors[errors.length-1]; }
// @Override
public final DeepLearningParameters get_params() { return _parms; }
// @Override public final Request2 job() { return get_params(); }
// double missingColumnsType() { return get_params()._sparse ? 0 : Double.NaN; }
public float error() { return (float) (_output.isClassifier() ? cm().err() : mse()); }
@Override public ModelMetrics.MetricBuilder makeMetricBuilder(String[] domain) {
switch(_output.getModelCategory()) {
case Binomial: return new ModelMetricsBinomial.MetricBuilderBinomial(domain);
case Multinomial: return new ModelMetricsMultinomial.MetricBuilderMultinomial(_output.nclasses(),domain);
case Regression: return new ModelMetricsRegression.MetricBuilderRegression();
case AutoEncoder: return new ModelMetricsAutoEncoder.MetricBuilderAutoEncoder(_output.nfeatures());
default: throw H2O.unimpl("Invalid ModelCategory " + _output.getModelCategory());
}
}
public int compareTo(DeepLearningModel o) {
if (o._output.isClassifier() != _output.isClassifier()) throw new UnsupportedOperationException("Cannot compare classifier against regressor.");
if (o._output.nclasses() != _output.nclasses()) throw new UnsupportedOperationException("Cannot compare models with different number of classes.");
return (error() < o.error() ? -1 : error() > o.error() ? 1 : 0);
}
public static class DeepLearningScoring extends Iced {
// static final int API_WEAVER = 1;
// static public DocGen.FieldDoc[] DOC_FIELDS;
public double epoch_counter;
public long training_samples;
public long training_time_ms;
//training/validation sets
boolean validation;
int num_folds;
public long score_training_samples;
public long score_validation_samples;
public boolean classification;
VarImp variable_importances;
// classification
public ConfusionMatrix train_confusion_matrix;
public ConfusionMatrix valid_confusion_matrix;
public double train_err = Double.NaN;
public double valid_err = Double.NaN;
public double train_logloss = Double.NaN;
public double valid_logloss = Double.NaN;
public AUC2 trainAUC;
public AUC2 validAUC;
public float[] train_hitratio; // "Hit ratio on training data"
public float[] valid_hitratio; // "Hit ratio on validation data"
// regression
public double train_mse = Double.NaN;
public double valid_mse = Double.NaN;
public double train_r2 = Double.NaN;
public double valid_r2 = Double.NaN;
public long scoring_time;
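// Deep copy via a serialization round-trip: write this object into an AutoBuffer,
// flip the buffer for reading, and deserialize it into a fresh instance.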
DeepLearningScoring deep_clone() {
AutoBuffer ab = new AutoBuffer();
this.write(ab);
ab.flipForReading();
return (DeepLearningScoring) new DeepLearningScoring().read(ab);
}
@Override public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("Training MSE: " + train_mse + "\n");
sb.append("Training R^2: " + train_r2 + "\n");
if (classification) {
sb.append("Training LogLoss: " + train_logloss + "\n");
sb.append("Training " + train_confusion_matrix.table().toString(1));
sb.append("Training Misclassification"
+ (trainAUC != null ? " [using threshold for " + AUC2.DEFAULT_CM.toString().replace("_", " ") + "]: " : ": ")
+ String.format("%.2f", 100 * train_err) + "%");
if (trainAUC != null) sb.append(", AUC: " + String.format("%.4f", 100 * trainAUC._auc) + "%");
}
if (validation || num_folds>0) {
if (num_folds > 0) {
sb.append("\nDoing " + num_folds + "-fold cross-validation:");
}
sb.append("\nValidation MSE: " + valid_mse + "\n");
sb.append("Validation R^2: " + valid_r2 + "\n");
if (classification) {
sb.append("Validation LogLoss: " + valid_logloss + "\n");
sb.append("Validation " + valid_confusion_matrix.table().toString(1));
sb.append("Validation Misclassification"
+ (validAUC != null ? " [using threshold for " + AUC2.DEFAULT_CM.toString().replace("_", " ") + "]: " : ": ")
+ String.format("%.2f", (100 * valid_err)) + "%");
if (validAUC != null) sb.append(", AUC: " + String.format("%.4f", 100 * validAUC._auc) + "%");
}
}
sb.append("\n");
return sb.toString();
}
}
final private static class ConfMat extends ConfusionMatrix {
final private double _err;
final private double _f1;
public ConfMat(double err, double f1) {
super(null, null);
_err=err;
_f1=f1;
}
@Override public double err() { return _err; }
@Override public double F1() { return _f1; }
}
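// Return the most recent confusion matrix: the validation/cross-validation one if available, otherwise the training one.
// If no confusion matrix was stored, fall back to a minimal ConfMat that only carries the error rate and max-F1.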
public ConfusionMatrix cm() {
final DeepLearningScoring lasterror = last_scored();
if (lasterror == null) return null;
ConfusionMatrix cm = lasterror.validation || lasterror.num_folds > 0 ?
lasterror.valid_confusion_matrix :
lasterror.train_confusion_matrix;
if (cm == null ) {
if (lasterror.validation || lasterror.num_folds > 0) {
return new ConfMat(lasterror.valid_err, lasterror.validAUC != null ? lasterror.validAUC.maxF1() : 0);
} else {
return new ConfMat(lasterror.train_err, lasterror.trainAUC != null ? lasterror.trainAUC.maxF1() : 0);
}
}
return cm;
}
// @Override
public double mse() {
if (errors == null) return Double.NaN;
return last_scored().validation || last_scored().num_folds > 0 ? last_scored().valid_mse : last_scored().train_mse;
}
public double logloss() {
if (errors == null) return Double.NaN;
return last_scored().validation || last_scored().num_folds > 0 ? last_scored().valid_logloss : last_scored().train_logloss;
}
// @Override
public VarImp varimp() {
if (errors == null) return null;
return last_scored().variable_importances;
}
private TwoDimTable createScoringHistoryTable(DeepLearningScoring[] errors) {
return createScoringHistoryTable(errors, 20);
}
private TwoDimTable createScoringHistoryTable(DeepLearningScoring[] errors, final int size_limit) {
assert (size_limit >= 10);
List<String> colHeaders = new ArrayList<>();
List<String> colTypes = new ArrayList<>();
List<String> colFormat = new ArrayList<>();
colHeaders.add("Timestamp"); colTypes.add("string"); colFormat.add("%s");
colHeaders.add("Training Duration"); colTypes.add("string"); colFormat.add("%s");
colHeaders.add("Training Speed"); colTypes.add("string"); colFormat.add("%s");
colHeaders.add("Training Epochs"); colTypes.add("double"); colFormat.add("%.5f");
colHeaders.add("Training Samples"); colTypes.add("long"); colFormat.add("%d");
colHeaders.add("Training MSE"); colTypes.add("double"); colFormat.add("%.5f");
if (!_output.autoencoder) {
colHeaders.add("Training R^2");
colTypes.add("double");
colFormat.add("%.5f");
}
if (_output.isClassifier()) {
colHeaders.add("Training LogLoss");
colTypes.add("double");
colFormat.add("%.5f");
}
if (_output.getModelCategory() == ModelCategory.Binomial) {
colHeaders.add("Training AUC");
colTypes.add("double");
colFormat.add("%.5f");
}
if (_output.getModelCategory() == ModelCategory.Binomial || _output.getModelCategory() == ModelCategory.Multinomial) {
colHeaders.add("Training Classification Error");
colTypes.add("double");
colFormat.add("%.5f");
}
if (get_params()._valid != null) {
colHeaders.add("Validation MSE"); colTypes.add("double"); colFormat.add("%.5f");
if (!_output.autoencoder) {
colHeaders.add("Validation R^2");
colTypes.add("double");
colFormat.add("%.5f");
}
if (_output.isClassifier()) {
colHeaders.add("Validation LogLoss");
colTypes.add("double");
colFormat.add("%.5f");
}
if (_output.getModelCategory() == ModelCategory.Binomial) {
colHeaders.add("Validation AUC");
colTypes.add("double");
colFormat.add("%.5f");
}
if (_output.isClassifier()) {
colHeaders.add("Validation Classification Error");
colTypes.add("double");
colFormat.add("%.5f");
}
} else if (get_params().getNumFolds() > 0) {
colHeaders.add("Cross-Validation MSE"); colTypes.add("double"); colFormat.add("%.5f");
// colHeaders.add("Validation R^2"); colTypes.add("double"); colFormat.add("%g");
if (_output.getModelCategory() == ModelCategory.Binomial) {
colHeaders.add("Cross-Validation AUC");
colTypes.add("double");
colFormat.add("%.5f");
}
if (_output.isClassifier()) {
colHeaders.add("Cross-Validation Classification Error");
colTypes.add("double");
colFormat.add("%.5f");
}
}
List<Integer> which = new ArrayList<>();
if (errors.length > size_limit) {
// always show first few
which.add(0);
// which.add(1);
// which.add(2);
// which.add(3);
// which.add(4);
// always show last few
// which.add(errors.length-5);
// which.add(errors.length-4);
// which.add(errors.length-3);
// which.add(errors.length-2);
which.add(errors.length-1);
// pick the remaining scoring points from the middle section
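// e.g. with 100 scoring events and size_limit=20, 'which' already holds {0, 99},
// so step = (100-2)/(20-2) ~ 5.4 and roughly every 5th-6th event from the middle section is kept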
final float step = (float)(errors.length-which.size())/(size_limit-which.size());
for (float i=5; i<errors.length-5; i+=step) {
if (which.size() < size_limit) which.add((int)i);
}
}
final int rows = Math.min(size_limit, errors.length);
TwoDimTable table = new TwoDimTable(
"Scoring History", null,
new String[rows],
colHeaders.toArray(new String[0]),
colTypes.toArray(new String[0]),
colFormat.toArray(new String[0]),
"");
int row = 0;
for( int i = 0; i<errors.length ; i++ ) {
if (errors.length > size_limit && !which.contains(i)) continue;
final DeepLearningScoring e = errors[i];
int col = 0;
assert(row < table.getRowDim());
assert(col < table.getColDim());
DateTimeFormatter fmt = DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss");
table.set(row, col++, fmt.print(start_time + e.training_time_ms));
table.set(row, col++, PrettyPrint.msecs(e.training_time_ms, true));
table.set(row, col++, e.training_time_ms == 0 ? null : (String.format("%.3f", e.training_samples/(e.training_time_ms/1e3)) + " rows/sec"));
table.set(row, col++, e.epoch_counter);
table.set(row, col++, e.training_samples);
table.set(row, col++, e.train_mse);
if (!_output.autoencoder) {
table.set(row, col++, e.train_r2);
}
if (_output.isClassifier()) {
table.set(row, col++, e.train_logloss);
}
if (_output.getModelCategory() == ModelCategory.Binomial) {
table.set(row, col++, e.trainAUC != null ? e.trainAUC._auc : Double.NaN);
}
if (_output.isClassifier()) {
table.set(row, col++, e.train_err);
}
if (get_params()._valid != null) {
table.set(row, col++, e.valid_mse);
if (!_output.autoencoder) {
table.set(row, col++, e.valid_r2);
}
if (_output.isClassifier()) {
table.set(row, col++, e.valid_logloss);
}
if (_output.getModelCategory() == ModelCategory.Binomial) {
table.set(row, col++, e.validAUC != null ? e.validAUC._auc : Double.NaN);
}
if (_output.isClassifier()) {
table.set(row, col++, e.valid_err);
}
}
else if(get_params().getNumFolds() > 1) {
throw H2O.unimpl("n_folds >= 2 is not (yet) implemented.");
}
row++;
}
return table;
}
// This describes the model, together with the parameters
// This will be shared: one per node
public static class DeepLearningModelInfo extends Iced {
public TwoDimTable summaryTable;
private DataInfo data_info;
public DataInfo data_info() { return data_info; }
// model is described by parameters and the following arrays
private Neurons.DenseRowMatrix[] dense_row_weights; //one 2D weight matrix per layer (stored as a 1D array each)
private Neurons.DenseColMatrix[] dense_col_weights; //one 2D weight matrix per layer (stored as a 1D array each)
private Neurons.DenseVector[] biases; //one 1D bias array per layer
private Neurons.DenseVector[] avg_activations; //one 1D array per hidden layer
// helpers for storing previous step deltas
// Note: These two arrays *could* be made transient and then initialized freshly in makeNeurons() and in DeepLearningTask.initLocal()
// But then, after each reduction, the momenta would be lost and would have to be rebuilt from scratch -> not *exactly* right, but close...
private Neurons.DenseRowMatrix[] dense_row_weights_momenta;
private Neurons.DenseColMatrix[] dense_col_weights_momenta;
private Neurons.DenseVector[] biases_momenta;
// helpers for AdaDelta
private Neurons.DenseRowMatrix[] dense_row_ada_dx_g;
private Neurons.DenseColMatrix[] dense_col_ada_dx_g;
private Neurons.DenseVector[] biases_ada_dx_g;
// compute model size (number of model parameters required for making predictions)
// momenta are not counted here, but they are needed for model building
public long size() {
long siz = 0;
for (Neurons.Matrix w : dense_row_weights) if (w != null) siz += w.size();
for (Neurons.Matrix w : dense_col_weights) if (w != null) siz += w.size();
for (Neurons.Vector b : biases) siz += b.size();
return siz;
}
// accessors to (shared) weights and biases - those will be updated racily (c.f. Hogwild!)
boolean has_momenta() { return get_params()._momentum_start != 0 || get_params()._momentum_stable != 0; }
boolean adaDelta() { return get_params()._adaptive_rate; }
public final Neurons.Matrix get_weights(int i) { return dense_row_weights[i] == null ? dense_col_weights[i] : dense_row_weights[i]; }
public final Neurons.DenseVector get_biases(int i) { return biases[i]; }
public final Neurons.Matrix get_weights_momenta(int i) { return dense_row_weights_momenta[i] == null ? dense_col_weights_momenta[i] : dense_row_weights_momenta[i]; }
public final Neurons.DenseVector get_biases_momenta(int i) { return biases_momenta[i]; }
public final Neurons.Matrix get_ada_dx_g(int i) { return dense_row_ada_dx_g[i] == null ? dense_col_ada_dx_g[i] : dense_row_ada_dx_g[i]; }
public final Neurons.DenseVector get_biases_ada_dx_g(int i) { return biases_ada_dx_g[i]; }
//accessor to shared parameter defining avg activations
public final Neurons.DenseVector get_avg_activations(int i) { return avg_activations[i]; }
private DeepLearningParameters parameters;
public final DeepLearningParameters get_params() { return parameters; }
private float[] mean_rate;
private float[] rms_rate;
private float[] mean_bias;
private float[] rms_bias;
private float[] mean_weight;
public float[] rms_weight;
public float[] mean_a;
private volatile boolean unstable = false;
public boolean unstable() { return unstable; }
public void set_unstable() { if (!unstable) computeStats(); unstable = true; }
private long processed_global;
public synchronized long get_processed_global() { return processed_global; }
public synchronized void set_processed_global(long p) { processed_global = p; }
public synchronized void add_processed_global(long p) { processed_global += p; }
private long processed_local;
public synchronized long get_processed_local() { return processed_local; }
public synchronized void set_processed_local(long p) { processed_local = p; }
public synchronized void add_processed_local(long p) { processed_local += p; }
public synchronized long get_processed_total() { return processed_global + processed_local; }
// package local helpers
int[] units; //number of neurons per layer, extracted from parameters and from datainfo
final boolean _classification; // Classification cache (nclasses>1)
final Frame _train; // Prepared training frame
final Frame _valid; // Prepared validation frame
public DeepLearningModelInfo() {
_classification = false;
_train = _valid = null;
}
public DeepLearningModelInfo(final DeepLearningParameters params, final DataInfo dinfo, boolean classification, Frame train, Frame valid) {
_classification = classification;
_train = train;
_valid = valid;
data_info = dinfo;
parameters = params;
final int num_input = dinfo.fullN();
final int num_output = get_params()._autoencoder ? num_input : (_classification ? train.lastVec().cardinality() : 1);
assert(num_input > 0);
assert(num_output > 0);
if (has_momenta() && adaDelta()) throw new IllegalArgumentException("Cannot have non-zero momentum and adaptive rate at the same time.");
final int layers=get_params()._hidden.length;
// units (# neurons for each layer)
units = new int[layers+2];
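// units = [#inputs, hidden[0], ..., hidden[last], #outputs]; the input width is optionally capped
// by max_categorical_features (the check below also guards against int overflow in the addition)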
if (get_params()._max_categorical_features <= Integer.MAX_VALUE - dinfo._nums)
units[0] = Math.min(dinfo._nums + get_params()._max_categorical_features, num_input);
else
units[0] = num_input;
System.arraycopy(get_params()._hidden, 0, units, 1, layers);
units[layers+1] = num_output;
boolean printLevels = units[0] > 1000L;
boolean warn = units[0] > 100000L;
if (printLevels) {
final String[][] domains = dinfo._adaptedFrame.domains();
int[] levels = new int[domains.length];
for (int i=0; i<levels.length; ++i) {
levels[i] = domains[i] != null ? domains[i].length : 0;
}
Arrays.sort(levels);
if (warn) {
Log.warn("===================================================================================================================================");
Log.warn(num_input + " input features" + (dinfo._cats > 0 ? " (after categorical one-hot encoding)" : "") + ". Can be slow and require a lot of memory.");
}
if (levels[levels.length-1] > 0) {
int levelcutoff = levels[levels.length-1-Math.min(10, levels.length-1)];
int count = 0;
for (int i=0; i<dinfo._adaptedFrame.numCols() - (get_params()._autoencoder ? 0 : 1) && count < 10; ++i) {
if (dinfo._adaptedFrame.domains()[i] != null && dinfo._adaptedFrame.domains()[i].length >= levelcutoff) {
if (warn) {
Log.warn("Categorical feature '" + dinfo._adaptedFrame._names[i] + "' has cardinality " + dinfo._adaptedFrame.domains()[i].length + ".");
} else {
Log.info("Categorical feature '" + dinfo._adaptedFrame._names[i] + "' has cardinality " + dinfo._adaptedFrame.domains()[i].length + ".");
}
}
count++;
}
}
if (warn) {
Log.warn("Suggestions:");
Log.warn(" *) Limit the size of the first hidden layer");
if (dinfo._cats > 0) {
Log.warn(" *) Limit the total number of one-hot encoded features with the parameter 'max_categorical_features'");
Log.warn(" *) Run h2o.interaction(...,pairwise=F) on high-cardinality categorical columns to limit the factor count, see http://learn.h2o.ai");
}
Log.warn("===================================================================================================================================");
}
}
// weights (to connect layers)
dense_row_weights = new Neurons.DenseRowMatrix[layers+1];
dense_col_weights = new Neurons.DenseColMatrix[layers+1];
// decide the format of the weight matrices: row-major or col-major
if (get_params()._col_major) dense_col_weights[0] = new Neurons.DenseColMatrix(units[1], units[0]);
else dense_row_weights[0] = new Neurons.DenseRowMatrix(units[1], units[0]);
for (int i = 1; i <= layers; ++i)
dense_row_weights[i] = new Neurons.DenseRowMatrix(units[i + 1] /*rows*/, units[i] /*cols*/);
// biases (only for hidden layers and output layer)
biases = new Neurons.DenseVector[layers+1];
for (int i=0; i<=layers; ++i) biases[i] = new Neurons.DenseVector(units[i+1]);
// average activation (only for hidden layers)
if (get_params()._autoencoder && get_params()._sparsity_beta > 0) {
avg_activations = new Neurons.DenseVector[layers];
mean_a = new float[layers];
for (int i = 0; i < layers; ++i) avg_activations[i] = new Neurons.DenseVector(units[i + 1]);
}
fillHelpers();
// for diagnostics
mean_rate = new float[units.length];
rms_rate = new float[units.length];
mean_bias = new float[units.length];
rms_bias = new float[units.length];
mean_weight = new float[units.length];
rms_weight = new float[units.length];
}
// deep clone all weights/biases
DeepLearningModelInfo deep_clone() {
AutoBuffer ab = new AutoBuffer();
this.write(ab);
ab.flipForReading();
return (DeepLearningModelInfo) new DeepLearningModelInfo().read(ab);
}
void fillHelpers() {
if (has_momenta()) {
dense_row_weights_momenta = new Neurons.DenseRowMatrix[dense_row_weights.length];
dense_col_weights_momenta = new Neurons.DenseColMatrix[dense_col_weights.length];
if (dense_row_weights[0] != null)
dense_row_weights_momenta[0] = new Neurons.DenseRowMatrix(units[1], units[0]);
else
dense_col_weights_momenta[0] = new Neurons.DenseColMatrix(units[1], units[0]);
for (int i=1; i<dense_row_weights_momenta.length; ++i) dense_row_weights_momenta[i] = new Neurons.DenseRowMatrix(units[i+1], units[i]);
biases_momenta = new Neurons.DenseVector[biases.length];
for (int i=0; i<biases_momenta.length; ++i) biases_momenta[i] = new Neurons.DenseVector(units[i+1]);
}
else if (adaDelta()) {
dense_row_ada_dx_g = new Neurons.DenseRowMatrix[dense_row_weights.length];
dense_col_ada_dx_g = new Neurons.DenseColMatrix[dense_col_weights.length];
//AdaGrad
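// AdaDelta keeps two running averages per weight (squared update E[dx^2] and squared gradient E[g^2]),
// interleaved in one array of twice the size - see computeStats(), which reads raw()[2*u] and raw()[2*u+1]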
if (dense_row_weights[0] != null) {
dense_row_ada_dx_g[0] = new Neurons.DenseRowMatrix(units[1], 2*units[0]);
} else {
dense_col_ada_dx_g[0] = new Neurons.DenseColMatrix(2*units[1], units[0]);
}
for (int i=1; i<dense_row_ada_dx_g.length; ++i) {
dense_row_ada_dx_g[i] = new Neurons.DenseRowMatrix(units[i+1], 2*units[i]);
}
biases_ada_dx_g = new Neurons.DenseVector[biases.length];
for (int i=0; i<biases_ada_dx_g.length; ++i) {
biases_ada_dx_g[i] = new Neurons.DenseVector(2*units[i+1]);
}
}
}
public TwoDimTable createSummaryTable() {
Neurons[] neurons = DeepLearningTask.makeNeuronsForTesting(this);
TwoDimTable table = new TwoDimTable(
"Status of Neuron Layers",
(!get_params()._autoencoder ? ("predicting " + _train.lastVecName() + ", ") : "") +
(get_params()._autoencoder ? "auto-encoder" :
_classification ? (units[units.length-1] + "-class classification") : "regression" )
+ ", " + get_params()._loss.toString() + " loss",
new String[neurons.length],
new String[]{"#", "Units", "Type", "Dropout", "L1", "L2",
(get_params()._adaptive_rate ? "Rate (Mean,RMS)" : "Rate"),
(get_params()._adaptive_rate ? "" : "Momentum"),
"Weight (Mean,RMS)",
"Bias (Mean,RMS)"
},
new String[]{"integer", "integer", "string", "double", "double", "double",
"string", "string", "string", "string"},
new String[]{"%d", "%d", "%s", "%2.2f %%", "%5f", "%5f", "%s", "%s", "%s", "%s"},
"");
final String format = "%7g";
for (int i = 0; i < neurons.length; ++i) {
table.set(i, 0, i + 1);
table.set(i, 1, neurons[i].units);
table.set(i, 2, neurons[i].getClass().getSimpleName());
if (i == 0) {
table.set(i, 3, neurons[i].params._input_dropout_ratio*100);
continue;
} else if (i < neurons.length - 1) {
if (neurons[i].params._hidden_dropout_ratios == null) {
table.set(i, 3, 0);
} else {
table.set(i, 3, neurons[i].params._hidden_dropout_ratios[i - 1]*100);
}
}
table.set(i, 4, neurons[i].params._l1);
table.set(i, 5, neurons[i].params._l2);
table.set(i, 6, (get_params()._adaptive_rate ? (" (" + String.format(format, mean_rate[i]) + ", " + String.format(format, rms_rate[i]) + ")")
: (String.format("%10g", neurons[i].rate(get_processed_total())))));
table.set(i, 7, get_params()._adaptive_rate ? "" : String.format("%5f", neurons[i].momentum(get_processed_total())));
table.set(i, 8, " (" + String.format(format, mean_weight[i])
+ ", " + String.format(format, rms_weight[i]) + ")");
table.set(i, 9, " (" + String.format(format, mean_bias[i])
+ ", " + String.format(format, rms_bias[i]) + ")");
}
summaryTable = table;
return summaryTable;
}
@Override public String toString() {
StringBuilder sb = new StringBuilder();
if (get_params()._diagnostics && !get_params()._quiet_mode) {
if (get_params()._sparsity_beta > 0) {
for (int k = 0; k < get_params()._hidden.length; k++) {
sb.append("Average activation in hidden layer ").append(k).append(" is ").append(mean_a[k]).append(" \n");
}
}
createSummaryTable();
sb.append(summaryTable.toString(1));
}
return sb.toString();
}
// DEBUGGING
public String toStringAll() {
StringBuilder sb = new StringBuilder();
sb.append(toString());
for (int i=0; i<units.length-1; ++i)
sb.append("\nweights[").append(i).append("][]=").append(Arrays.toString(get_weights(i).raw()));
for (int i=0; i<units.length-1; ++i) {
sb.append("\nbiases[").append(i).append("][]=").append(Arrays.toString(get_biases(i).raw()));
}
if (has_momenta()) {
for (int i=0; i<units.length-1; ++i)
sb.append("\nweights_momenta[").append(i).append("][]=").append(Arrays.toString(get_weights_momenta(i).raw()));
}
if (biases_momenta != null) {
for (int i=0; i<units.length-1; ++i) {
sb.append("\nbiases_momenta[").append(i).append("][]=").append(Arrays.toString(biases_momenta[i].raw()));
}
}
sb.append("\nunits[]=").append(Arrays.toString(units));
sb.append("\nprocessed global: ").append(get_processed_global());
sb.append("\nprocessed local: ").append(get_processed_local());
sb.append("\nprocessed total: ").append(get_processed_total());
sb.append("\n");
return sb.toString();
}
void initializeMembers() {
randomizeWeights();
//TODO: determine good/optimal/best initialization scheme for biases
// hidden layers
for (int i=0; i<get_params()._hidden.length; ++i) {
if (get_params()._activation == DeepLearningParameters.Activation.Rectifier
|| get_params()._activation == DeepLearningParameters.Activation.RectifierWithDropout
|| get_params()._activation == DeepLearningParameters.Activation.Maxout
|| get_params()._activation == DeepLearningParameters.Activation.MaxoutWithDropout
) {
// Arrays.fill(biases[i], 1.); //old behavior
Arrays.fill(biases[i].raw(), i == 0 ? 0.5f : 1f); //new behavior, might be slightly better
}
else if (get_params()._activation == DeepLearningParameters.Activation.Tanh || get_params()._activation == DeepLearningParameters.Activation.TanhWithDropout) {
Arrays.fill(biases[i].raw(), 0f);
}
}
Arrays.fill(biases[biases.length-1].raw(), 0f); //output layer
}
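// Element-wise accumulation of another node's model state (weights, biases, average activations,
// momenta and AdaDelta accumulators); together with div(N) below, this implements model averaging
// across nodes after a map/reduce pass.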
public void add(DeepLearningModelInfo other) {
for (int i=0;i<dense_row_weights.length;++i)
ArrayUtils.add(get_weights(i).raw(), other.get_weights(i).raw());
for (int i=0;i<biases.length;++i) ArrayUtils.add(biases[i].raw(), other.biases[i].raw());
if (avg_activations != null)
for (int i=0;i<avg_activations.length;++i)
ArrayUtils.add(avg_activations[i].raw(), other.avg_activations[i].raw()); //accumulate the other node's average activations, not its biases
if (has_momenta()) {
assert(other.has_momenta());
for (int i=0;i<dense_row_weights_momenta.length;++i)
ArrayUtils.add(get_weights_momenta(i).raw(), other.get_weights_momenta(i).raw());
for (int i=0;i<biases_momenta.length;++i)
ArrayUtils.add(biases_momenta[i].raw(), other.biases_momenta[i].raw());
}
if (adaDelta()) {
assert(other.adaDelta());
for (int i=0;i<dense_row_ada_dx_g.length;++i) {
ArrayUtils.add(get_ada_dx_g(i).raw(), other.get_ada_dx_g(i).raw());
}
}
add_processed_local(other.get_processed_local());
}
protected void div(float N) {
for (int i=0; i<dense_row_weights.length; ++i)
ArrayUtils.div(get_weights(i).raw(), N);
for (Neurons.Vector bias : biases) ArrayUtils.div(bias.raw(), N);
if (avg_activations != null)
for (Neurons.Vector avgac : avg_activations)
ArrayUtils.div(avgac.raw(), N);
if (has_momenta()) {
for (int i=0; i<dense_row_weights_momenta.length; ++i)
ArrayUtils.div(get_weights_momenta(i).raw(), N);
for (Neurons.Vector bias_momenta : biases_momenta) ArrayUtils.div(bias_momenta.raw(), N);
}
if (adaDelta()) {
for (int i=0;i<dense_row_ada_dx_g.length;++i) {
ArrayUtils.div(get_ada_dx_g(i).raw(), N);
}
}
}
double uniformDist(Random rand, double min, double max) {
return min + rand.nextFloat() * (max - min);
}
void randomizeWeights() {
for (int w=0; w<dense_row_weights.length; ++w) {
final Random rng = water.util.RandomUtils.getRNG(get_params()._seed + 0xBAD5EED + w+1); //to match NeuralNet behavior
final double range = Math.sqrt(6. / (units[w] + units[w+1]));
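// Glorot/Bengio-style "Xavier" uniform initialization: range = sqrt(6 / (fan_in + fan_out))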
for( int i = 0; i < get_weights(w).rows(); i++ ) {
for( int j = 0; j < get_weights(w).cols(); j++ ) {
if (get_params()._initial_weight_distribution == DeepLearningParameters.InitialWeightDistribution.UniformAdaptive) {
if (w==dense_row_weights.length-1 && _classification)
get_weights(w).set(i,j, (float)(4.*uniformDist(rng, -range, range))); //Softmax might need an extra factor 4, since it's like a sigmoid
else
get_weights(w).set(i,j, (float)uniformDist(rng, -range, range));
}
else if (get_params()._initial_weight_distribution == DeepLearningParameters.InitialWeightDistribution.Uniform) {
get_weights(w).set(i,j, (float)uniformDist(rng, -get_params()._initial_weight_scale, get_params()._initial_weight_scale));
}
else if (get_params()._initial_weight_distribution == DeepLearningParameters.InitialWeightDistribution.Normal) {
get_weights(w).set(i,j, (float)(rng.nextGaussian() * get_params()._initial_weight_scale));
}
}
}
}
}
// TODO: Add "subset randomize" function
// int count = Math.min(15, _previous.units);
// double min = -.1f, max = +.1f;
// //double min = -1f, max = +1f;
// for( int o = 0; o < units; o++ ) {
// for( int n = 0; n < count; n++ ) {
// int i = rand.nextInt(_previous.units);
// int w = o * _previous.units + i;
// _w[w] = uniformDist(rand, min, max);
/**
* Compute Variable Importance, based on
* GEDEON: DATA MINING OF INPUTS: ANALYSING MAGNITUDE AND FUNCTIONAL MEASURES
* @return variable importances for input features
*/
public float[] computeVariableImportances() {
float[] vi = new float[units[0]];
Arrays.fill(vi, 0f);
float[][] Qik = new float[units[0]][units[2]]; //importance of input i on output k
float[] sum_wj = new float[units[1]]; //sum of incoming weights into first hidden layer
float[] sum_wk = new float[units[2]]; //sum of incoming weights into output layer (or second hidden layer)
for (float[] Qi : Qik) Arrays.fill(Qi, 0f);
Arrays.fill(sum_wj, 0f);
Arrays.fill(sum_wk, 0f);
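// Gedeon '97: Q_ik = sum_j ( |w_ij| / sum_i' |w_i'j| ) * ( |w_jk| / sum_j' |w_j'k| );
// Q_ik is then normalized over inputs i for each output k, vi[i] = sum_k Q_ik,
// and finally vi is scaled so that the most important feature has importance 1.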
// compute sum of absolute incoming weights
for( int j = 0; j < units[1]; j++ ) {
for( int i = 0; i < units[0]; i++ ) {
float wij = get_weights(0).get(j, i);
sum_wj[j] += Math.abs(wij);
}
}
for( int k = 0; k < units[2]; k++ ) {
for( int j = 0; j < units[1]; j++ ) {
float wjk = get_weights(1).get(k,j);
sum_wk[k] += Math.abs(wjk);
}
}
// compute importance of input i on output k as product of connecting weights going through j
for( int i = 0; i < units[0]; i++ ) {
for( int k = 0; k < units[2]; k++ ) {
for( int j = 0; j < units[1]; j++ ) {
float wij = get_weights(0).get(j,i);
float wjk = get_weights(1).get(k,j);
//Qik[i][k] += Math.abs(wij)/sum_wj[j] * wjk; //Wong,Gedeon,Taggart '95
Qik[i][k] += Math.abs(wij)/sum_wj[j] * Math.abs(wjk)/sum_wk[k]; //Gedeon '97
}
}
}
// normalize Qik over all outputs k
for( int k = 0; k < units[2]; k++ ) {
float sumQk = 0;
for( int i = 0; i < units[0]; i++ ) sumQk += Qik[i][k];
for( int i = 0; i < units[0]; i++ ) Qik[i][k] /= sumQk;
}
// importance for feature i is the sum over k of i->k importances
for( int i = 0; i < units[0]; i++ ) vi[i] = ArrayUtils.sum(Qik[i]);
//normalize importances such that max(vi) = 1
ArrayUtils.div(vi, ArrayUtils.maxValue(vi));
return vi;
}
// compute stats on all nodes
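// Computes, per layer: mean and (approximate) RMS of biases and weights, and - for adaptive rate -
// of the effective per-weight learning rates; also updates the mean hidden activations for sparse
// autoencoders. Flags the model as unstable if any statistic exceeds 1e10 or becomes NaN.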
public void computeStats() {
float[][] rate = get_params()._adaptive_rate ? new float[units.length-1][] : null;
if (get_params()._autoencoder && get_params()._sparsity_beta > 0) {
for (int k = 0; k < get_params()._hidden.length; k++) {
mean_a[k] = 0;
for (int j = 0; j < avg_activations[k].size(); j++)
mean_a[k] += avg_activations[k].get(j);
mean_a[k] /= avg_activations[k].size();
}
}
for( int y = 1; y < units.length; y++ ) {
mean_rate[y] = rms_rate[y] = 0;
mean_bias[y] = rms_bias[y] = 0;
mean_weight[y] = rms_weight[y] = 0;
for(int u = 0; u < biases[y-1].size(); u++) {
mean_bias[y] += biases[y-1].get(u);
}
if (rate != null) rate[y-1] = new float[get_weights(y-1).raw().length];
for(int u = 0; u < get_weights(y-1).raw().length; u++) {
mean_weight[y] += get_weights(y-1).raw()[u];
if (rate != null) {
// final float RMS_dx = (float)Math.sqrt(ada[y-1][2*u]+(float)get_params().epsilon);
// final float invRMS_g = (float)(1/Math.sqrt(ada[y-1][2*u+1]+(float)get_params().epsilon));
final float RMS_dx = MathUtils.approxSqrt(get_ada_dx_g(y-1).raw()[2*u]+(float)get_params()._epsilon);
final float invRMS_g = MathUtils.approxInvSqrt(get_ada_dx_g(y-1).raw()[2*u+1]+(float)get_params()._epsilon);
rate[y-1][u] = RMS_dx*invRMS_g; //not exactly right, RMS_dx should be from the previous time step -> but close enough for diagnostics.
mean_rate[y] += rate[y-1][u];
}
}
mean_bias[y] /= biases[y-1].size();
mean_weight[y] /= get_weights(y-1).size();
if (rate != null) mean_rate[y] /= rate[y-1].length;
for(int u = 0; u < biases[y-1].size(); u++) {
final double db = biases[y-1].get(u) - mean_bias[y];
rms_bias[y] += db * db;
}
for(int u = 0; u < get_weights(y-1).size(); u++) {
final double dw = get_weights(y-1).raw()[u] - mean_weight[y];
rms_weight[y] += dw * dw;
if (rate != null) {
final double drate = rate[y-1][u] - mean_rate[y];
rms_rate[y] += drate * drate;
}
}
rms_bias[y] = MathUtils.approxSqrt(rms_bias[y]/biases[y-1].size());
rms_weight[y] = MathUtils.approxSqrt(rms_weight[y] / get_weights(y - 1).size());
if (rate != null) rms_rate[y] = MathUtils.approxSqrt(rms_rate[y]/rate[y-1].length);
// rms_bias[y] = (float)Math.sqrt(rms_bias[y]/biases[y-1].length);
// rms_weight[y] = (float)Math.sqrt(rms_weight[y]/weights[y-1].length);
// if (rate != null) rms_rate[y] = (float)Math.sqrt(rms_rate[y]/rate[y-1].length);
// Abort the run if weights or biases are unreasonably large (Note that all input values are normalized upfront)
// This can happen with Rectifier units when L1/L2/max_w2 are all set to 0, especially when using more than 1 hidden layer.
final double thresh = 1e10;
unstable |= mean_bias[y] > thresh || isNaN(mean_bias[y])
|| rms_bias[y] > thresh || isNaN(rms_bias[y])
|| mean_weight[y] > thresh || isNaN(mean_weight[y])
|| rms_weight[y] > thresh || isNaN(rms_weight[y]);
}
}
}
/**
* Helper to allocate keys for output frames for weights and biases
* @param destKey
*/
private void makeWeightsBiases(Key destKey) {
if (!model_info.get_params()._export_weights_and_biases) {
_output.weights = null;
_output.biases = null;
} else {
_output.weights = new Key[model_info.get_params()._hidden.length + 1];
for (int i = 0; i < _output.weights.length; ++i) {
_output.weights[i] = Key.makeUserHidden(Key.make(destKey + ".weights." + i));
}
_output.biases = new Key[model_info.get_params()._hidden.length + 1];
for (int i = 0; i < _output.biases.length; ++i) {
_output.biases[i] = Key.makeUserHidden(Key.make(destKey + ".biases." + i));
}
}
}
/** Constructor to restart from a checkpointed model
* @param destKey New destination key for the model
* @param cp Checkpoint to restart from
* @param store_best_model Store only the best model instead of the latest one */
public DeepLearningModel(final Key destKey, final DeepLearningModel cp, final boolean store_best_model, final DataInfo dataInfo) {
super(destKey, (DeepLearningParameters)cp._parms.clone(), (DeepLearningModelOutput)cp._output.clone());
if (store_best_model) {
model_info = cp.model_info.deep_clone(); //don't want to interfere with model being built, just make a deep copy and store that
model_info.data_info = dataInfo.deep_clone(); //replace previous data_info with updated version that's passed in (contains enum for classification)
} else {
model_info = (DeepLearningModelInfo) cp.model_info.clone(); //shallow clone is ok (won't modify the Checkpoint in K-V store during checkpoint restart)
model_info.data_info = dataInfo; //shallow clone is ok
// Ok to modify (the normally immutable read-only) parameters, because
// this is a private copy just cloned above in the super() call.
_parms._checkpoint = cp._key; //it's only a "real" checkpoint if job != null, otherwise a best model copy
}
actual_best_model_key = cp.actual_best_model_key;
start_time = cp.start_time;
run_time = cp.run_time;
training_rows = cp.training_rows; //copy the value to display the right number on the model page before training has started
validation_rows = cp.validation_rows; //copy the value to display the right number on the model page before training has started
_bestError = cp._bestError;
// deep clone scoring history
errors = cp.errors.clone();
for (int i=0; i<errors.length;++i)
errors[i] = cp.errors[i].deep_clone();
_output.errors = last_scored();
makeWeightsBiases(destKey);
_output.scoring_history = createScoringHistoryTable(errors);
_output.variable_importances = calcVarImp(last_scored().variable_importances);
// set proper timing
_timeLastScoreEnter = System.currentTimeMillis();
_timeLastScoreStart = 0;
_timeLastScoreEnd = 0;
_timeLastPrintStart = 0;
assert(Arrays.equals(_key._kb, destKey._kb));
}
public DeepLearningModel(final Key destKey, final DeepLearningParameters parms, final DeepLearningModelOutput output, Frame train, Frame valid) {
super(destKey, parms, output);
boolean classification = train.lastVec().isEnum();
final DataInfo dinfo = new DataInfo(Key.make(), train, valid, parms._autoencoder ? 0 : 1, parms._autoencoder || parms._use_all_factor_levels, //use all FactorLevels for auto-encoder
parms._autoencoder ? DataInfo.TransformType.NORMALIZE : DataInfo.TransformType.STANDARDIZE, //transform predictors
classification ? DataInfo.TransformType.NONE : DataInfo.TransformType.STANDARDIZE, _parms._missing_values_handling == DeepLearningModel.DeepLearningParameters.MissingValuesHandling.Skip);
output._names = train._names ; // Since changed by DataInfo, need to be reflected in the Model output as well
output._domains= train.domains();
DKV.put(dinfo._key,dinfo);
model_info = new DeepLearningModelInfo(parms, dinfo, classification, train, valid);
actual_best_model_key = Key.makeUserHidden(Key.make());
if (parms.getNumFolds() != 0) actual_best_model_key = null;
if (!parms._autoencoder) {
errors = new DeepLearningScoring[1];
errors[0] = new DeepLearningScoring();
errors[0].validation = (parms._valid != null);
errors[0].num_folds = parms.getNumFolds();
_output.errors = last_scored();
_output.scoring_history = createScoringHistoryTable(errors);
_output.variable_importances = calcVarImp(last_scored().variable_importances);
}
makeWeightsBiases(destKey);
run_time = 0;
start_time = System.currentTimeMillis();
_timeLastScoreEnter = start_time;
assert _key.equals(destKey);
}
public long _timeLastScoreEnter; //not transient: needed for HTML display page
transient private long _timeLastScoreStart;
transient private long _timeLastScoreEnd;
transient private long _timeLastPrintStart;
/**
*
* @param ftrain potentially downsampled training data for scoring
* @param ftest potentially downsampled validation data for scoring
* @param job_key key of the owning job
* @param progressKey key of the progress
* @return true if model building is ongoing
*/
boolean doScoring(Frame ftrain, Frame ftest, Key job_key, Key progressKey) {
boolean keep_running;
try {
final long now = System.currentTimeMillis();
epoch_counter = (float)model_info().get_processed_total()/training_rows;
final double time_last_iter_millis = Math.max(5,now-_timeLastScoreEnter);
// Auto-tuning
// if multi-node and auto-tuning and at least 10 ms for communication (to avoid doing this on multi-JVM on same node),
// then adjust the auto-tuning parameter 'actual_train_samples_per_iteration' such that the targeted ratio of comm to comp is achieved
// Note: actual communication time is estimated by the NetworkTest's collective test.
if (H2O.CLOUD.size() > 1 && get_params()._train_samples_per_iteration == -2 && time_for_communication_us > 1e4) {
// Log.info("Time taken for communication: " + PrettyPrint.usecs((long)time_for_communication_us));
// Log.info("Time taken for Map/Reduce iteration: " + PrettyPrint.msecs((long)time_last_iter_millis, true));
final double comm_to_work_ratio = (time_for_communication_us *1e-3) / time_last_iter_millis;
// Log.info("Ratio of network communication to computation: " + String.format("%.3f", comm_to_work_ratio));
// Log.info("target_comm_to_work: " + get_params().target_ratio_comm_to_comp);
final double correction = get_params()._target_ratio_comm_to_comp / comm_to_work_ratio;
// Log.warn("Suggested value for train_samples_per_iteration: " + get_params().actual_train_samples_per_iteration/correction);
actual_train_samples_per_iteration /= correction;
actual_train_samples_per_iteration = Math.max(1, actual_train_samples_per_iteration);
}
run_time += time_last_iter_millis;
_timeLastScoreEnter = now;
keep_running = (epoch_counter < get_params()._epochs);
final long sinceLastScore = now -_timeLastScoreStart;
final long sinceLastPrint = now -_timeLastPrintStart;
if (!keep_running || sinceLastPrint > get_params()._score_interval * 1000) { //print this after every score_interval, not considering duty cycle
_timeLastPrintStart = now;
if (!get_params()._quiet_mode) {
Log.info("Training time: " + PrettyPrint.msecs(run_time, true)
+ ". Processed " + String.format("%,d", model_info().get_processed_total()) + " samples" + " (" + String.format("%.3f", epoch_counter) + " epochs)."
+ " Speed: " + String.format("%.3f", 1000. * model_info().get_processed_total() / run_time) + " samples/sec.\n");
}
}
// this is potentially slow - only do every so often
if( !keep_running ||
(sinceLastScore > get_params()._score_interval *1000 //don't score too often
&&(double)(_timeLastScoreEnd-_timeLastScoreStart)/sinceLastScore < get_params()._score_duty_cycle) ) { //duty cycle
if (progressKey != null) {
new Job.ProgressUpdate("Scoring on " + ftrain.numRows() + " training samples" +
(ftest != null ? (", " + ftest.numRows() + " validation samples)") : ")")
).fork(progressKey);
}
final boolean printme = !get_params()._quiet_mode;
_timeLastScoreStart = now;
if (get_params()._diagnostics) model_info().computeStats();
DeepLearningScoring err = new DeepLearningScoring();
err.training_time_ms = run_time;
err.epoch_counter = epoch_counter;
err.training_samples = model_info().get_processed_total();
err.validation = ftest != null;
err.score_training_samples = ftrain.numRows();
err.classification = _output.isClassifier();
if (get_params()._autoencoder) {
if (printme) Log.info("Scoring the auto-encoder.");
// training
{
final Frame mse_frame = scoreAutoEncoder(ftrain, Key.make());
final Vec l2 = mse_frame.anyVec();
Log.info("Mean reconstruction error on training data: " + l2.mean() + "\n");
err.train_mse = l2.mean();
mse_frame.delete();
}
if (ftest != null) {
final Frame mse_frame = scoreAutoEncoder(ftest, Key.make());
final Vec l2 = mse_frame.anyVec();
Log.info("Mean reconstruction error on validation data: " + l2.mean() + "\n");
err.valid_mse = l2.mean();
mse_frame.delete();
}
} else {
if (printme) Log.info("Scoring the model.");
// compute errors
final String m = model_info().toString();
if (m.length() > 0) Log.info(m);
final Frame trainPredict = score(ftrain);
trainPredict.delete();
hex.ModelMetricsSupervised mm1 = (ModelMetricsSupervised)ModelMetrics.getFromDKV(this,ftrain);
if (mm1 instanceof ModelMetricsBinomial) {
ModelMetricsBinomial mm = (ModelMetricsBinomial)(mm1);
err.trainAUC = mm._auc;
err.train_confusion_matrix = mm.cm();
err.train_err = err.train_confusion_matrix.err();
err.train_logloss = mm._logloss;
}
else if (mm1 instanceof ModelMetricsMultinomial) {
ModelMetricsMultinomial mm = (ModelMetricsMultinomial)(mm1);
err.train_confusion_matrix = mm.cm();
err.train_err = err.train_confusion_matrix.err();
err.train_logloss = mm._logloss;
err.train_hitratio = mm._hit_ratios;
}
err.train_mse = mm1._mse;
err.train_r2 = mm1.r2();
_output._train_metrics = mm1;
if (get_params()._score_training_samples != 0) {
_output._train_metrics._description = "Metrics reported on " + ftrain.numRows() + " training set samples";
}
_output.run_time = run_time;
if (ftest != null) {
Frame validPred = score(ftest);
validPred.delete();
hex.ModelMetricsSupervised mm2 = (ModelMetricsSupervised)hex.ModelMetrics.getFromDKV(this, ftest);
if (mm2 != null) {
if (mm2 instanceof ModelMetricsBinomial) {
ModelMetricsBinomial mm = (ModelMetricsBinomial) (mm2);
err.validAUC = mm._auc;
err.valid_confusion_matrix = mm.cm();
err.valid_logloss = mm._logloss;
err.valid_err = err.valid_confusion_matrix.err();
} else if (mm2 instanceof ModelMetricsMultinomial) {
ModelMetricsMultinomial mm = (ModelMetricsMultinomial) (mm2);
err.valid_confusion_matrix = mm.cm();
err.valid_err = err.valid_confusion_matrix.err();
err.valid_logloss = mm._logloss;
err.valid_hitratio = mm._hit_ratios;
}
err.valid_mse = mm2._mse;
err.valid_r2 = mm2.r2();
_output._valid_metrics = mm2;
if (get_params()._score_validation_samples != 0 && get_params()._score_validation_samples != ftest.numRows()) {
_output._valid_metrics._description = "Metrics reported on " + ftest.numRows() + " validation set samples";
if (get_params()._score_validation_sampling == DeepLearningParameters.ClassSamplingMethod.Stratified) {
_output._valid_metrics._description += " (stratified sampling)";
}
}
}
}
}
if (get_params()._variable_importances) {
if (!get_params()._quiet_mode) Log.info("Computing variable importances.");
final float[] vi = model_info().computeVariableImportances();
err.variable_importances = new VarImp(vi, Arrays.copyOfRange(model_info().data_info().coefNames(), 0, vi.length));
}
_timeLastScoreEnd = System.currentTimeMillis();
err.scoring_time = System.currentTimeMillis() - now;
// enlarge the error array by one, push latest score back
if (errors == null) {
errors = new DeepLearningScoring[]{err};
} else {
DeepLearningScoring[] err2 = new DeepLearningScoring[errors.length + 1];
System.arraycopy(errors, 0, err2, 0, errors.length);
err2[err2.length - 1] = err;
errors = err2;
}
_output.errors = last_scored();
water.util.Timer t = new Timer();
// store weights and matrices to Frames
if (_output.weights != null && _output.biases != null) {
for (int i = 0; i < _output.weights.length; ++i) {
model_info.get_weights(i).toFrame(_output.weights[i]);
}
for (int i = 0; i < _output.biases.length; ++i) {
model_info.get_biases(i).toFrame(_output.biases[i]);
}
Log.info("Writing weights and biases to Frames took " + t.time()/1000. + " seconds.");
}
_output.scoring_history = createScoringHistoryTable(errors);
_output.variable_importances = calcVarImp(last_scored().variable_importances);
_output.model_summary = model_info.createSummaryTable();
if (!get_params()._autoencoder) {
// always keep a copy of the best model so far (based on the following criterion)
if (actual_best_model_key != null && get_params()._override_with_best_model && (
// if we have a best_model in DKV, then compare against its error() (unless it's a different model as judged by the network size)
(DKV.get(actual_best_model_key) != null && (error() < DKV.get(actual_best_model_key).<DeepLearningModel>get().error() || !Arrays.equals(model_info().units, DKV.get(actual_best_model_key).<DeepLearningModel>get().model_info().units)))
||
// otherwise, compare against our own _bestError
(DKV.get(actual_best_model_key) == null && error() < _bestError)
) ) {
if (!get_params()._quiet_mode)
Log.info("Error reduced from " + _bestError + " to " + error() + ".");
_bestError = error();
putMeAsBestModel(actual_best_model_key);
// debugging check
//if (false) {
// DeepLearningModel bestModel = DKV.get(actual_best_model_key).get();
// final Frame fr = ftest != null ? ftest : ftrain;
// final Frame bestPredict = bestModel.score(fr);
// final Frame hitRatio_bestPredict = new Frame(bestPredict);
// final double err3 = calcError(fr, fr.lastVec(), bestPredict, hitRatio_bestPredict, "cross-check",
// printme, get_params()._max_confusion_matrix_size, new hex.ConfusionMatrix2(), _output.isClassifier() && _output.nclasses() == 2 ? new AUC(null,null) : null, null);
// if (_output.isClassifier())
// assert (ftest != null ? Math.abs(err.valid_err - err3) < 1e-5 : Math.abs(err.train_err - err3) < 1e-5);
// else
// assert (ftest != null ? Math.abs(err.valid_mse - err3) < 1e-5 : Math.abs(err.train_mse - err3) < 1e-5);
// bestPredict.delete();
}
// else {
// // keep output JSON small
// if (errors.length > 1) {
// if (last_scored().trainAUC != null) last_scored().trainAUC.clear();
// if (last_scored().validAUC != null) last_scored().validAUC.clear();
// last_scored().variable_importances = null;
// print the freshly scored model to ASCII
if (keep_running)
for (String s : toString().split("\n")) Log.info(s);
if (printme) Log.info("Time taken for scoring and diagnostics: " + PrettyPrint.msecs(err.scoring_time, true));
}
}
if (model_info().unstable()) {
Log.warn(unstable_msg);
keep_running = false;
} else if ( (_output.isClassifier() && last_scored().train_err <= get_params()._classification_stop)
|| (!_output.isClassifier() && last_scored().train_mse <= get_params()._regression_stop) ) {
Log.info("Achieved requested predictive accuracy on the training data. Model building completed.");
keep_running = false;
}
update(job_key);
}
catch (Exception ex) {
//ex.printStackTrace();
throw new RuntimeException(ex);
// return false;
}
return keep_running;
}
// @Override protected void setCrossValidationError(Parameters job, double cv_error, ConfusionMatrix cm, AUCData auc, HitRatio hr) {
// _have_cv_results = true;
// if (!get_params().classification)
// last_scored().valid_mse = cv_error;
// else
// last_scored().valid_err = cv_error;
// last_scored().score_validation_samples = last_scored().score_training_samples / get_params().n_folds;
// last_scored().num_folds = get_params().n_folds;
// last_scored().valid_confusion_matrix = cm;
// last_scored().validAUC = auc;
// last_scored().valid_hitratio = hr;
// DKV.put(this._key, this); //overwrite this model
@Override public String toString() {
StringBuilder sb = new StringBuilder();
sb.append(model_info.toString());
//sb.append(last_scored().toString());
sb.append(_output.scoring_history.toString());
if (_output.variable_importances != null) {
for (String s : Arrays.asList(_output.variable_importances.toString().split("\n")).subList(0, 12))
sb.append(s).append("\n");
}
return sb.toString();
}
/** Make either a prediction or a reconstruction.
* @param orig Test dataset
* @param adaptedFr Test dataset, adapted to the model
* @return A frame containing the prediction or reconstruction
*/
@Override protected Frame scoreImpl(Frame orig, Frame adaptedFr, String destination_key) {
if (!get_params()._autoencoder) {
return super.scoreImpl(orig,adaptedFr,destination_key);
} else {
// Reconstruction
final int len = model_info().data_info().fullN();
String prefix = "reconstr_";
assert(model_info().data_info()._responses == 0);
String[] coefnames = model_info().data_info().coefNames();
assert(len == coefnames.length);
Frame adaptFrm = new Frame(adaptedFr);
for( int c=0; c<len; c++ )
adaptFrm.add(prefix+coefnames[c],adaptFrm.anyVec().makeZero());
new MRTask() {
@Override public void map( Chunk chks[] ) {
double tmp [] = new double[_output._names.length];
float preds[] = new float [len];
final Neurons[] neurons = DeepLearningTask.makeNeuronsForTesting(model_info);
for( int row=0; row<chks[0]._len; row++ ) {
float p[] = score_autoencoder(chks, row, tmp, preds, neurons);
for( int c=0; c<preds.length; c++ )
chks[_output._names.length+c].set(row,p[c]);
}
}
}.doAll(adaptFrm);
// Return the predicted columns
int x=_output._names.length, y=adaptFrm.numCols();
Frame f = adaptFrm.extractFrame(x, y); //this will call vec_impl() and we cannot call the delete() below just yet
f = new Frame((null == destination_key ? Key.make() : Key.make(destination_key)), f.names(), f.vecs());
DKV.put(f);
makeMetricBuilder(null).makeModelMetrics(this, orig, Double.NaN);
return f;
}
}
/**
* Predict from raw double values representing the data
* @param data raw array containing categorical values (one-hot encoded / "horizontalized" to 1,0,0,1,0,0 etc.) and numerical values (0.35,1.24,5.3234,etc), both can contain NaNs
* @param preds predicted label and per-class probabilities (for classification), predicted target (regression), can contain NaNs
* @return preds, can contain NaNs
*/
@Override public double[] score0(double[] data, double[] preds) {
if (model_info().unstable()) {
Log.warn(unstable_msg);
throw new UnsupportedOperationException("Trying to predict with an unstable model.");
}
Neurons[] neurons = DeepLearningTask.makeNeuronsForTesting(model_info);
((Neurons.Input)neurons[0]).setInput(-1, data);
DeepLearningTask.step(-1, neurons, model_info, false, null);
float[] out = neurons[neurons.length - 1]._a.raw();
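// 'out' holds the output-layer activations: per-class probabilities for classifiers,
// or a single (standardized) numeric prediction for regression (de-standardized below).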
if (_output.isClassifier()) {
assert (preds.length == out.length + 1);
for (int i = 0; i < preds.length - 1; ++i) {
preds[i + 1] = out[i];
if (Double.isNaN(preds[i + 1])) throw new RuntimeException("Predicted class probability NaN!");
}
preds[0] = hex.genmodel.GenModel.getPrediction(preds, data);
} else {
if (model_info().data_info()._normRespMul != null)
preds[0] = (out[0] / model_info().data_info()._normRespMul[0] + model_info().data_info()._normRespSub[0]);
else
preds[0] = out[0];
if (Double.isNaN(preds[0])) throw new RuntimeException("Predicted regression target NaN!");
}
return preds;
}
/**
* Score auto-encoded reconstruction (on-the-fly, without allocating the reconstruction as done in Frame score(Frame fr))
* @param frame Original data (can contain response, will be ignored)
* @return Frame containing one Vec with reconstruction error (MSE) of each reconstructed row, caller is responsible for deletion
*/
public Frame scoreAutoEncoder(Frame frame, Key destination_key) {
if (!get_params()._autoencoder)
throw new H2OIllegalArgumentException("Only for AutoEncoder Deep Learning model.", "");
final int len = _output._names.length;
Frame adaptFrm = new Frame(frame);
Vec v0 = adaptFrm.anyVec().makeZero();
Scope.enter();
adaptTestForTrain(adaptFrm,true);
adaptFrm.add("Reconstruction.MSE", v0);
new MRTask() {
@Override public void map( Chunk chks[] ) {
double tmp [] = new double[len];
final Neurons[] neurons = DeepLearningTask.makeNeuronsForTesting(model_info);
for( int row=0; row<chks[0]._len; row++ ) {
for( int i=0; i<len; i++ )
tmp[i] = chks[i].atd(row);
//store the per-row reconstruction error (MSE) in the last column
chks[len].set(row, score_autoencoder(tmp, null, neurons));
}
}
}.doAll(adaptFrm);
Scope.exit();
Frame res = adaptFrm.extractFrame(len, adaptFrm.numCols());
res = new Frame(destination_key, res.names(), res.vecs());
DKV.put(res);
makeMetricBuilder(null).makeModelMetrics(this, frame, res.vecs()[0].mean());
return res;
}
@Override public Frame score(Frame fr, String destination_key) {
if (!_parms._autoencoder)
return super.score(fr, destination_key);
else {
Frame adaptFr = new Frame(fr);
adaptTestForTrain(adaptFr, true); // Adapt
Frame output = scoreImpl(fr, adaptFr, destination_key); // Score
Vec[] vecs = adaptFr.vecs();
for (int i = 0; i < vecs.length; i++)
if (fr.find(vecs[i]) != -1) // Exists in the original frame?
vecs[i] = null; // Do not delete it
adaptFr.delete();
return output;
}
}
/**
* Compute the activations ("deep features") of a given hidden layer for each row, materialized on-the-fly into a new Frame
* @param frame Original data (can contain response, will be ignored)
* @param layer index of the hidden layer for which to extract the features
* @return Frame containing the deep features (#cols = hidden[layer])
*/
public Frame scoreDeepFeatures(Frame frame, final int layer) {
if (layer < 0 || layer >= model_info().get_params()._hidden.length)
throw new H2OIllegalArgumentException("hidden layer (index) to extract must be between " + 0 + " and " + (model_info().get_params()._hidden.length-1),"");
final int len = _output.nfeatures();
Vec resp = null;
if (isSupervised()) {
int ridx = frame.find(_output.responseName());
if (ridx != -1) { // drop the response for scoring!
frame = new Frame(frame);
resp = frame.vecs()[ridx];
frame.remove(ridx);
}
}
Frame adaptFrm = new Frame(frame);
//create new features, will be dense
final int features = model_info().get_params()._hidden[layer];
Vec[] vecs = adaptFrm.anyVec().makeZeros(features);
Scope.enter();
adaptTestForTrain(adaptFrm,true);
for (int j=0; j<features; ++j) {
adaptFrm.add("DF.C" + (j+1), vecs[j]);
}
new MRTask() {
@Override public void map( Chunk chks[] ) {
double tmp [] = new double[len];
final Neurons[] neurons = DeepLearningTask.makeNeuronsForTesting(model_info);
for( int row=0; row<chks[0]._len; row++ ) {
for( int i=0; i<len; i++ )
tmp[i] = chks[i].atd(row);
((Neurons.Input)neurons[0]).setInput(-1, tmp);
DeepLearningTask.step(-1, neurons, model_info, false, null);
float[] out = neurons[layer+1]._a.raw(); //extract the layer-th hidden feature
for( int c=0; c<features; c++ )
chks[_output._names.length+c].set(row,out[c]);
}
}
}.doAll(adaptFrm);
// Return just the output columns
int x=_output._names.length, y=adaptFrm.numCols();
Frame ret = adaptFrm.extractFrame(x, y);
if (resp != null) ret.prepend(_output.responseName(), resp);
Scope.exit();
return ret;
}
// Make (potentially expanded) reconstruction
private float[] score_autoencoder(Chunk[] chks, int row_in_chunk, double[] tmp, float[] preds, Neurons[] neurons) {
assert(get_params()._autoencoder);
assert(tmp.length == _output._names.length);
for( int i=0; i<tmp.length; i++ )
tmp[i] = chks[i].atd(row_in_chunk);
score_autoencoder(tmp, preds, neurons); // this fills preds, returns MSE error (ignored here)
return preds;
}
/**
* Helper to reconstruct original data into preds array and compute the reconstruction error (MSE)
* @param data Original data (unexpanded)
* @param preds Reconstruction (potentially expanded)
* @return reconstruction error
*/
private double score_autoencoder(double[] data, float[] preds, Neurons[] neurons) {
assert(model_info().get_params()._autoencoder);
if (model_info().unstable()) {
Log.warn(unstable_msg);
throw new UnsupportedOperationException("Trying to predict with an unstable model.");
}
((Neurons.Input)neurons[0]).setInput(-1, data); // expands categoricals inside
DeepLearningTask.step(-1, neurons, model_info, false, null); // reconstructs data in expanded space
float[] in = neurons[0]._a.raw(); //input (expanded)
float[] out = neurons[neurons.length - 1]._a.raw(); //output (expanded)
assert(in.length == out.length);
// First normalize categorical reconstructions to be probabilities
// (such that they can be better compared to the input where one factor was 1 and the rest was 0)
// model_info().data_info().softMaxCategoricals(out,out); //only modifies the categoricals
// Compute MSE of reconstruction in expanded space (with categorical probabilities)
double l2 = 0;
for (int i = 0; i < in.length; ++i)
l2 += Math.pow((out[i] - in[i]), 2);
l2 /= in.length;
if (preds!=null) {
// Now scale back numerical columns to original data space (scale + shift)
model_info().data_info().unScaleNumericals(out, out); //only modifies the numericals
System.arraycopy(out, 0, preds, 0, out.length); //copy reconstruction into preds
}
return l2;
}
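// Illustration (added, hedged): the reconstruction error above is a plain mean squared
// error over the expanded (one-hot encoded and standardized) representation. A minimal
// standalone equivalent, not used by the scoring path:
private static double illustrateReconstructionMSE(float[] input, float[] reconstruction) {
  assert input.length == reconstruction.length;
  double l2 = 0;
  for (int i = 0; i < input.length; ++i) {
    double d = reconstruction[i] - input[i];
    l2 += d * d;
  }
  return l2 / input.length; // same normalization as score_autoencoder above
}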
/**
* Compute quantile-based threshold (in reconstruction error) to find outliers
* @param mse Vector containing reconstruction errors
* @param quantile Quantile for cut-off
* @return Threshold in MSE value for a point to be above the quantile
*/
public double calcOutlierThreshold(Vec mse, double quantile) {
Frame mse_frame = new Frame(Key.make(), new String[]{"Reconstruction.MSE"}, new Vec[]{mse});
DKV.put(mse_frame._key, mse_frame);
QuantileModel.QuantileParameters parms = new QuantileModel.QuantileParameters();
parms._train = mse_frame._key;
parms._probs = new double[]{quantile};
Quantile job = new Quantile(parms).trainModel();
QuantileModel kmm = job.get();
job.remove();
double q = kmm._output._quantiles[0][0];
kmm.delete();
DKV.remove(mse_frame._key);
return q;
}
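// Hedged usage sketch (added): given a Vec of per-row reconstruction errors (for example
// the "Reconstruction.MSE" column produced by autoencoder scoring), rows above the
// 95th-percentile threshold can be flagged as outliers. Names below are illustrative only:
//   double threshold = model.calcOutlierThreshold(mseVec, 0.95);
//   boolean isOutlier = mseVec.at(row) > threshold;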
// helper to push this model to another key (for keeping good models)
private void putMeAsBestModel(Key bestModelKey) {
DeepLearningModel bestModel = new DeepLearningModel(bestModelKey, this, true, model_info().data_info());
DKV.put(bestModel._key, bestModel);
assert (DKV.get(bestModelKey) != null);
assert (bestModel.compareTo(this) <= 0);
}
@Override public void delete() {
if (_output.weights != null && _output.biases != null) {
for (Key k : _output.weights) {
if (DKV.getGet(k) != null) ((Frame) DKV.getGet(k)).delete();
}
for (Key k : _output.biases) {
if (DKV.getGet(k) != null) ((Frame) DKV.getGet(k)).delete();
}
}
super.delete();
}
void delete_xval_models( ) {
// if (get_params().xval_models != null) {
//   for (Key k : get_params().xval_models) {
//     DKV.get(k).<DeepLearningModel>get().delete_best_model();
//     DKV.get(k).<DeepLearningModel>get().delete();
//   }
// }
}
private String getHeader() {
assert get_params()._autoencoder;
StringBuilder sb = new StringBuilder();
final int len = model_info().data_info().fullN();
String prefix = "reconstr_";
assert (model_info().data_info()._responses == 0);
String[] coefnames = model_info().data_info().coefNames();
assert (len == coefnames.length);
for (int c = 0; c < len; c++) {
if (c>0) sb.append(",");
sb.append(prefix + coefnames[c]);
}
return sb.toString();
}
@Override protected SB toJavaInit(SB sb, SB fileContextSB) {
sb = super.toJavaInit(sb, fileContextSB);
String mname = JCodeGen.toJavaId(_key.toString());
Neurons[] neurons = DeepLearningTask.makeNeuronsForTesting(model_info());
sb.ip("public boolean isSupervised() { return " + isSupervised() + "; }").nl();
sb.ip("public int nfeatures() { return "+_output.nfeatures()+"; }").nl();
sb.ip("public int nclasses() { return "+ (_parms._autoencoder ? neurons[neurons.length-1].units : _output.nclasses()) + "; }").nl();
sb.ip("public ModelCategory getModelCategory() { return ModelCategory."+_output.getModelCategory()+"; }").nl();
if (model_info().data_info()._nums > 0) {
JCodeGen.toStaticVar(sb, "NUMS", new double[model_info().data_info()._nums], "Workspace for storing numerical input variables.");
JCodeGen.toStaticVar(sb, "NORMMUL", model_info().data_info()._normMul, "Standardization/Normalization scaling factor for numerical variables.");
JCodeGen.toStaticVar(sb, "NORMSUB", model_info().data_info()._normSub, "Standardization/Normalization offset for numerical variables.");
}
if (model_info().data_info()._cats > 0) {
JCodeGen.toStaticVar(sb, "CATS", new int[model_info().data_info()._cats], "Workspace for storing categorical input variables.");
}
JCodeGen.toStaticVar(sb, "CATOFFSETS", model_info().data_info()._catOffsets, "Workspace for categorical offsets.");
if (model_info().data_info()._normRespMul != null) {
JCodeGen.toStaticVar(sb, "NORMRESPMUL", model_info().data_info()._normRespMul, "Standardization/Normalization scaling factor for response.");
JCodeGen.toStaticVar(sb, "NORMRESPSUB", model_info().data_info()._normRespSub, "Standardization/Normalization offset for response.");
}
if (get_params()._hidden_dropout_ratios != null) {
JCodeGen.toStaticVar(sb, "HIDDEN_DROPOUT_RATIOS", get_params()._hidden_dropout_ratios, "Hidden layer dropout ratios.");
}
int[] layers = new int[neurons.length];
for (int i=0;i<neurons.length;++i)
layers[i] = neurons[i].units;
JCodeGen.toStaticVar(sb, "NEURONS", layers, "Number of neurons for each layer.");
if (get_params()._autoencoder) {
sb.i(1).p("public int getPredsSize() { return " + model_info.units[model_info.units.length-1] + "; }").nl();
sb.i(1).p("public boolean isAutoEncoder() { return true; }").nl();
sb.i(1).p("public String getHeader() { return \"" + getHeader() + "\"; }").nl();
}
// activation storage
sb.i(1).p("// Storage for neuron activation values.").nl();
sb.i(1).p("public static final float[][] ACTIVATION = new float[][] {").nl();
for (int i=0; i<neurons.length; i++) {
String colInfoClazz = mname + "_Activation_"+i;
sb.i(2).p("/* ").p(neurons[i].getClass().getSimpleName()).p(" */ ");
sb.p(colInfoClazz).p(".VALUES");
if (i!=neurons.length-1) sb.p(',');
sb.nl();
fileContextSB.i().p("// Neuron activation values for ").p(neurons[i].getClass().getSimpleName()).p(" layer").nl();
JCodeGen.toClassWithArray(fileContextSB, null, colInfoClazz, new float[layers[i]]);
}
sb.i(1).p("};").nl();
// biases
sb.i(1).p("// Neuron bias values.").nl();
sb.i(1).p("public static final float[][] BIAS = new float[][] {").nl();
for (int i=0; i<neurons.length; i++) {
String colInfoClazz = mname + "_Bias_"+i;
sb.i(2).p("/* ").p(neurons[i].getClass().getSimpleName()).p(" */ ");
sb.p(colInfoClazz).p(".VALUES");
if (i!=neurons.length-1) sb.p(',');
sb.nl();
fileContextSB.i().p("// Neuron bias values for ").p(neurons[i].getClass().getSimpleName()).p(" layer").nl();
float[] bias = i == 0 ? null : new float[model_info().get_biases(i-1).size()];
if (i>0) {
for (int j=0; j<bias.length; ++j) bias[j] = model_info().get_biases(i-1).get(j);
}
JCodeGen.toClassWithArray(fileContextSB, null, colInfoClazz, bias);
}
sb.i(1).p("};").nl();
// weights
sb.i(1).p("// Connecting weights between neurons.").nl();
sb.i(1).p("public static final float[][] WEIGHT = new float[][] {").nl();
for (int i=0; i<neurons.length; i++) {
String colInfoClazz = mname + "_Weight_"+i;
sb.i(2).p("/* ").p(neurons[i].getClass().getSimpleName()).p(" */ ");
sb.p(colInfoClazz).p(".VALUES");
if (i!=neurons.length-1) sb.p(',');
sb.nl();
if (i > 0) {
fileContextSB.i().p("// Neuron weights connecting ").
p(neurons[i - 1].getClass().getSimpleName()).p(" and ").
p(neurons[i].getClass().getSimpleName()).
p(" layer").nl();
}
float[] weights = i == 0 ? null : new float[model_info().get_weights(i-1).rows()*model_info().get_weights(i-1).cols()];
if (i>0) {
final int rows = model_info().get_weights(i-1).rows();
final int cols = model_info().get_weights(i-1).cols();
for (int j=0; j<rows; ++j)
for (int k=0; k<cols; ++k)
weights[j*cols+k] = model_info().get_weights(i-1).get(j,k);
}
JCodeGen.toClassWithArray(fileContextSB, null, colInfoClazz, weights);
}
sb.i(1).p("};").nl();
return sb;
}
@Override protected void toJavaPredictBody( final SB bodySb, final SB classCtxSb, final SB fileCtxSb) {
SB model = new SB();
bodySb.i().p("java.util.Arrays.fill(preds,0);").nl();
final int cats = model_info().data_info()._cats;
final int nums = model_info().data_info()._nums;
// initialize input layer
if (nums > 0) bodySb.i().p("java.util.Arrays.fill(NUMS,0f);").nl();
if (cats > 0) bodySb.i().p("java.util.Arrays.fill(CATS,0);").nl();
bodySb.i().p("int i = 0, ncats = 0;").nl();
if (cats > 0) {
bodySb.i().p("for(; i<"+cats+"; ++i) {").nl();
bodySb.i(1).p("if (!Double.isNaN(data[i])) {").nl();
bodySb.i(2).p("int c = (int) data[i];").nl();
if (model_info().data_info()._useAllFactorLevels)
bodySb.i(2).p("CATS[ncats++] = c + CATOFFSETS[i];").nl();
else
bodySb.i(2).p("if (c != 0) CATS[ncats++] = c + CATOFFSETS[i] - 1;").nl();
bodySb.i(1).p("}").nl();
bodySb.i().p("}").nl();
}
if (nums > 0) {
bodySb.i().p("final int n = data.length;").nl();
bodySb.i().p("for(; i<n; ++i) {").nl();
bodySb.i(1).p("NUMS[i" + (cats > 0 ? "-" + cats : "") + "] = Double.isNaN(data[i]) ? 0 : ");
if (model_info().data_info()._normMul != null) {
bodySb.p("(data[i] - NORMSUB[i" + (cats > 0 ? "-" + cats : "") + "])*NORMMUL[i" + (cats > 0 ? "-" + cats : "") + "];").nl();
} else {
bodySb.p("data[i];").nl();
}
bodySb.i(0).p("}").nl();
}
bodySb.i().p("java.util.Arrays.fill(ACTIVATION[0],0);").nl();
if (cats > 0) {
bodySb.i().p("for (i=0; i<ncats; ++i) ACTIVATION[0][CATS[i]] = 1f;").nl();
}
if (nums > 0) {
bodySb.i().p("for (i=0; i<NUMS.length; ++i) {").nl();
bodySb.i(1).p("ACTIVATION[0][CATOFFSETS[CATOFFSETS.length-1] + i] = Double.isNaN(NUMS[i]) ? 0f : (float) NUMS[i];").nl();
bodySb.i().p("}").nl();
}
boolean tanh=(get_params()._activation == DeepLearningParameters.Activation.Tanh || get_params()._activation == DeepLearningParameters.Activation.TanhWithDropout);
boolean relu=(get_params()._activation == DeepLearningParameters.Activation.Rectifier || get_params()._activation == DeepLearningParameters.Activation.RectifierWithDropout);
boolean maxout=(get_params()._activation == DeepLearningParameters.Activation.Maxout || get_params()._activation == DeepLearningParameters.Activation.MaxoutWithDropout);
final String stopping = get_params()._autoencoder ? "(i<=ACTIVATION.length-1)" : "(i<ACTIVATION.length-1)";
// make prediction: forward propagation
bodySb.i().p("for (i=1; i<ACTIVATION.length; ++i) {").nl();
bodySb.i(1).p("java.util.Arrays.fill(ACTIVATION[i],0f);").nl();
if (maxout) {
bodySb.i(1).p("float rmax = 0;").nl();
}
bodySb.i(1).p("for (int r=0; r<ACTIVATION[i].length; ++r) {").nl();
bodySb.i(2).p("final int cols = ACTIVATION[i-1].length;").nl();
if (maxout) {
bodySb.i(2).p("float cmax = Float.NEGATIVE_INFINITY;").nl();
}
bodySb.i(2).p("for (int c=0; c<cols; ++c) {").nl();
if (!maxout) {
bodySb.i(3).p("ACTIVATION[i][r] += ACTIVATION[i-1][c] * WEIGHT[i][r*cols+c];").nl();
} else {
bodySb.i(3).p("if " + stopping + " cmax = Math.max(ACTIVATION[i-1][c] * WEIGHT[i][r*cols+c], cmax);").nl();
bodySb.i(3).p("else ACTIVATION[i][r] += ACTIVATION[i-1][c] * WEIGHT[i][r*cols+c];").nl();
}
bodySb.i(2).p("}").nl();
if (maxout) {
bodySb.i(2).p("if "+ stopping +" ACTIVATION[i][r] = Float.isInfinite(cmax) ? 0f : cmax;").nl();
}
bodySb.i(2).p("ACTIVATION[i][r] += BIAS[i][r];").nl();
if (maxout) {
bodySb.i(2).p("if " + stopping + " rmax = Math.max(rmax, ACTIVATION[i][r]);").nl();
}
bodySb.i(1).p("}").nl();
if (!maxout) bodySb.i(1).p("if " + stopping + " {").nl();
bodySb.i(2).p("for (int r=0; r<ACTIVATION[i].length; ++r) {").nl();
if (tanh) {
bodySb.i(3).p("ACTIVATION[i][r] = 1f - 2f / (1f + (float)Math.exp(2*ACTIVATION[i][r]));").nl();
} else if (relu) {
bodySb.i(3).p("ACTIVATION[i][r] = Math.max(0f, ACTIVATION[i][r]);").nl();
} else if (maxout) {
bodySb.i(3).p("if (rmax > 1 ) ACTIVATION[i][r] /= rmax;").nl();
}
if (get_params()._hidden_dropout_ratios != null) {
bodySb.i(3).p("if (i<ACTIVATION.length-1) {").nl();
bodySb.i(4).p("ACTIVATION[i][r] *= HIDDEN_DROPOUT_RATIOS[i-1];").nl();
bodySb.i(3).p("}").nl();
}
// if (maxout) bodySb.i(1).p("}").nl();
bodySb.i(2).p("}").nl();
if (!maxout) bodySb.i(1).p("}").nl();
if (_output.isClassifier()) {
bodySb.i(1).p("if (i == ACTIVATION.length-1) {").nl();
// softmax
bodySb.i(2).p("float max = ACTIVATION[i][0];").nl();
bodySb.i(2).p("for (int r=1; r<ACTIVATION[i].length; r++) {").nl();
bodySb.i(3).p("if (ACTIVATION[i][r]>max) max = ACTIVATION[i][r];").nl();
bodySb.i(2).p("}").nl();
bodySb.i(2).p("float scale = 0f;").nl();
bodySb.i(2).p("for (int r=0; r<ACTIVATION[i].length; r++) {").nl();
bodySb.i(3).p("ACTIVATION[i][r] = (float) Math.exp(ACTIVATION[i][r] - max);").nl();
bodySb.i(3).p("scale += ACTIVATION[i][r];").nl();
bodySb.i(2).p("}").nl();
bodySb.i(2).p("for (int r=0; r<ACTIVATION[i].length; r++) {").nl();
bodySb.i(3).p("if (Float.isNaN(ACTIVATION[i][r]))").nl();
bodySb.i(4).p("throw new RuntimeException(\"Numerical instability, predicted NaN.\");").nl();
bodySb.i(3).p("ACTIVATION[i][r] /= scale;").nl();
bodySb.i(3).p("preds[r+1] = ACTIVATION[i][r];").nl();
bodySb.i(2).p("}").nl();
bodySb.i(1).p("}").nl();
bodySb.i().p("}").nl();
} else if (!get_params()._autoencoder) { //Regression
bodySb.i(1).p("if (i == ACTIVATION.length-1) {").nl();
// regression: set preds[1], FillPreds0 will put it into preds[0]
if (model_info().data_info()._normRespMul != null) {
bodySb.i(2).p("preds[1] = (ACTIVATION[i][0] / NORMRESPMUL[0] + NORMRESPSUB[0]);").nl();
}
else {
bodySb.i(2).p("preds[1] = ACTIVATION[i][0];").nl();
}
bodySb.i(2).p("if (Double.isNaN(preds[1])) throw new RuntimeException(\"Predicted regression target NaN!\");").nl();
bodySb.i(1).p("}").nl();
bodySb.i().p("}").nl();
} else { //AutoEncoder
bodySb.i(1).p("if (i == ACTIVATION.length-1) {").nl();
bodySb.i(2).p("for (int r=0; r<ACTIVATION[i].length; r++) {").nl();
bodySb.i(3).p("if (Float.isNaN(ACTIVATION[i][r]))").nl();
bodySb.i(4).p("throw new RuntimeException(\"Numerical instability, reconstructed NaN.\");").nl();
bodySb.i(3).p("preds[r] = ACTIVATION[i][r];").nl();
bodySb.i(2).p("}").nl();
if (model_info().data_info()._nums > 0) {
int ns = model_info().data_info().numStart();
bodySb.i(2).p("for (int k=" + ns + "; k<" + model_info().data_info().fullN() + "; ++k) {").nl();
bodySb.i(3).p("preds[k] = preds[k] / NORMMUL[k-" + ns + "] + NORMSUB[k-" + ns + "];").nl();
bodySb.i(2).p("}").nl();
}
bodySb.i(1).p("}").nl();
bodySb.i().p("}").nl();
// DEBUGGING
// bodySb.i().p("System.out.println(java.util.Arrays.toString(data));").nl();
// bodySb.i().p("System.out.println(java.util.Arrays.toString(ACTIVATION[0]));").nl();
// bodySb.i().p("System.out.println(java.util.Arrays.toString(ACTIVATION[ACTIVATION.length-1]));").nl();
// bodySb.i().p("System.out.println(java.util.Arrays.toString(preds));").nl();
// bodySb.i().p("System.out.println(\"\");").nl();
}
fileCtxSb.p(model);
if (_output.autoencoder) return;
if (_output.isClassifier()) {
bodySb.ip("water.util.ModelUtils.correctProbabilities(preds, PRIOR_CLASS_DISTRIB, MODEL_CLASS_DISTRIB);").nl();
bodySb.ip("preds[0] = hex.genmodel.GenModel.getPrediction(preds, data);").nl();
} else {
bodySb.ip("preds[0] = (float)preds[1];").nl();
}
}
transient private final String unstable_msg = "Job was aborted due to observed numerical instability (exponential growth)."
+ "\nTry a different initial distribution, a bounded activation function or adding"
+ "\nregularization with L1, L2 or max_w2 and/or use a smaller learning rate or faster annealing.";
}
|
package org.openwms.tms;
import static org.hamcrest.CoreMatchers.is;
import static org.mockito.BDDMockito.given;
import static org.springframework.restdocs.mockmvc.MockMvcRestDocumentation.document;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.patch;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import javax.persistence.EntityManager;
import java.util.Optional;
import org.junit.Test;
import org.openwms.common.TransportUnit;
import org.openwms.tms.api.CreateTransportOrderVO;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.MediaType;
/**
* A ChangeStateDocumentation.
*
* @author <a href="mailto:scherrer@openwms.org">Heiko Scherrer</a>
*/
public class ChangeStateDocumentation extends DocumentationBase {
private static final Logger LOGGER = LoggerFactory.getLogger(ChangeStateDocumentation.class);
@Autowired
private EntityManager em;
public
@Test
void turnBackState() throws Exception {
// setup ...
CreateTransportOrderVO vo = createTO();
postTOAndValidate(vo, NOTLOGGED);
vo.setState(TransportOrderState.INITIALIZED.toString());
given(commonGateway.getTransportUnit(KNOWN)).willReturn(Optional.of(new TransportUnit(KNOWN, INIT_LOC, ERR_LOC_STRING)));
// test ...
mockMvc.perform(
patch(TMSConstants.ROOT_ENTITIES)
.contentType(MediaType.APPLICATION_JSON)
.content(objectMapper.writeValueAsString(vo))
)
.andExpect(status().isBadRequest())
.andExpect(jsonPath("messageKey", is(TMSMessageCodes.TO_STATE_CHANGE_BACKWARDS_NOT_ALLOWED)))
.andDo(document("to-patch-state-change-back"))
;
}
public
@Test
void createAnNewOneWhenOneIsAlreadyStarted() throws Exception {
// setup ...
CreateTransportOrderVO vo = createTO();
postTOAndValidate(vo, NOTLOGGED);
LOGGER.debug("Order 1: " + vo);
// create a second one that shall wait in INITIALIZED
CreateTransportOrderVO vo2 = createTO();
postTOAndValidate(vo2, NOTLOGGED);
em.flush();
LOGGER.debug("Order 2: " + vo2);
vo2.setState(TransportOrderState.STARTED.toString());
given(commonGateway.getTransportUnit(KNOWN)).willReturn(Optional.of(new TransportUnit(KNOWN, INIT_LOC, ERR_LOC_STRING)));
// test ...
mockMvc.perform(
patch(TMSConstants.ROOT_ENTITIES)
.contentType(MediaType.APPLICATION_JSON)
.content(objectMapper.writeValueAsString(vo2))
)
.andExpect(status().isBadRequest())
.andExpect(jsonPath("messageKey", is(TMSMessageCodes.START_TO_NOT_ALLOWED_ALREADY_STARTED_ONE)))
.andDo(document("to-patch-state-change-start-no-allowed-one-exists"))
;
LOGGER.debug("ENDE");
}
public
@Test
void cancellingAnInitializedOne() throws Exception {
// setup ...
CreateTransportOrderVO vo = createTO();
postTOAndValidate(vo, NOTLOGGED);
CreateTransportOrderVO vo2 = createTO();
postTOAndValidate(vo2, NOTLOGGED);
vo2.setState(TransportOrderState.CANCELED.toString());
given(commonGateway.getTransportUnit(KNOWN)).willReturn(Optional.of(new TransportUnit(KNOWN, INIT_LOC, ERR_LOC_STRING)));
// test ...
mockMvc.perform(
patch(TMSConstants.ROOT_ENTITIES)
.contentType(MediaType.APPLICATION_JSON)
.content(objectMapper.writeValueAsString(vo2))
)
.andExpect(status().isNoContent())
.andDo(document("to-patch-state-change-start-no-allowed-one-exists"))
;
}
public
@Test
void settingAnInitializedOneOnFailure() throws Exception {
// setup ...
CreateTransportOrderVO vo = createTO();
postTOAndValidate(vo, NOTLOGGED);
CreateTransportOrderVO vo2 = createTO();
postTOAndValidate(vo2, NOTLOGGED);
vo2.setState(TransportOrderState.ONFAILURE.toString());
given(commonGateway.getTransportUnit(KNOWN)).willReturn(Optional.of(new TransportUnit(KNOWN, INIT_LOC, ERR_LOC_STRING)));
// test ...
mockMvc.perform(
patch(TMSConstants.ROOT_ENTITIES)
.contentType(MediaType.APPLICATION_JSON)
.content(objectMapper.writeValueAsString(vo2))
)
.andExpect(status().isNoContent())
.andDo(document("to-patch-state-initialize-to-failure"))
;
}
public
@Test
void finishingAnInitializedOne() throws Exception {
// setup ...
CreateTransportOrderVO vo = createTO();
postTOAndValidate(vo, NOTLOGGED);
CreateTransportOrderVO vo2 = createTO();
postTOAndValidate(vo2, NOTLOGGED);
vo2.setState(TransportOrderState.FINISHED.toString());
given(commonGateway.getTransportUnit(KNOWN)).willReturn(Optional.of(new TransportUnit(KNOWN, INIT_LOC, ERR_LOC_STRING)));
// test ...
mockMvc.perform(
patch(TMSConstants.ROOT_ENTITIES)
.contentType(MediaType.APPLICATION_JSON)
.content(objectMapper.writeValueAsString(vo2))
)
.andExpect(status().isBadRequest())
.andDo(document("to-patch-state-finish-an-initialized"))
;
}
public
@Test
void startingAnStartedOne() throws Exception {
// setup ...
CreateTransportOrderVO vo = createTO();
postTOAndValidate(vo, NOTLOGGED);
vo.setState(TransportOrderState.STARTED.toString());
given(commonGateway.getTransportUnit(KNOWN)).willReturn(Optional.of(new TransportUnit(KNOWN, INIT_LOC, ERR_LOC_STRING)));
// test ...
mockMvc.perform(
patch(TMSConstants.ROOT_ENTITIES)
.contentType(MediaType.APPLICATION_JSON)
.content(objectMapper.writeValueAsString(vo))
)
.andExpect(status().isNoContent())
.andDo(document("to-patch-state-change"))
;
}
public
@Test
void cancellingAnStartedOne() throws Exception {
// setup ...
CreateTransportOrderVO vo = createTO();
postTOAndValidate(vo, NOTLOGGED);
vo.setState(TransportOrderState.CANCELED.toString());
given(commonGateway.getTransportUnit(KNOWN)).willReturn(Optional.of(new TransportUnit(KNOWN, INIT_LOC, ERR_LOC_STRING)));
// test ...
mockMvc.perform(
patch(TMSConstants.ROOT_ENTITIES)
.contentType(MediaType.APPLICATION_JSON)
.content(objectMapper.writeValueAsString(vo))
)
.andExpect(status().isNoContent())
.andDo(document("to-patch-state-cancel-a-started"))
;
}
public
@Test
void settingAnStartedOneOnFailure() throws Exception {
// setup ...
CreateTransportOrderVO vo = createTO();
postTOAndValidate(vo, NOTLOGGED);
vo.setState(TransportOrderState.ONFAILURE.toString());
given(commonGateway.getTransportUnit(KNOWN)).willReturn(Optional.of(new TransportUnit(KNOWN, INIT_LOC, ERR_LOC_STRING)));
// test ...
mockMvc.perform(
patch(TMSConstants.ROOT_ENTITIES)
.contentType(MediaType.APPLICATION_JSON)
.content(objectMapper.writeValueAsString(vo))
)
.andExpect(status().isNoContent())
.andDo(document("to-patch-state-onfailure-a-started"))
;
}
public
@Test
void finishingAnStartedOne() throws Exception {
// setup ...
CreateTransportOrderVO vo = createTO();
postTOAndValidate(vo, NOTLOGGED);
vo.setState(TransportOrderState.FINISHED.toString());
given(commonGateway.getTransportUnit(KNOWN)).willReturn(Optional.of(new TransportUnit(KNOWN, INIT_LOC, ERR_LOC_STRING)));
// test ...
mockMvc.perform(
patch(TMSConstants.ROOT_ENTITIES)
.contentType(MediaType.APPLICATION_JSON)
.content(objectMapper.writeValueAsString(vo))
)
.andExpect(status().isNoContent())
.andDo(document("to-patch-state-finish-a-started"))
;
}
public
@Test
void changingAnFinishedOne() throws Exception {
// setup ...
CreateTransportOrderVO vo = createTO();
postTOAndValidate(vo, NOTLOGGED);
given(commonGateway.getTransportUnit(KNOWN)).willReturn(Optional.of(new TransportUnit(KNOWN, INIT_LOC, ERR_LOC_STRING)));
vo.setState(TransportOrderState.FINISHED.toString());
mockMvc.perform(
patch(TMSConstants.ROOT_ENTITIES)
.contentType(MediaType.APPLICATION_JSON)
.content(objectMapper.writeValueAsString(vo))
)
.andExpect(status().isNoContent())
;
// test ...
vo.setState(TransportOrderState.CANCELED.toString());
mockMvc.perform(
patch(TMSConstants.ROOT_ENTITIES)
.contentType(MediaType.APPLICATION_JSON)
.content(objectMapper.writeValueAsString(vo))
)
.andExpect(status().isBadRequest())
.andDo(document("to-patch-state-change-a-finished"))
;
}
public
@Test
void changingAnOnFailureOne() throws Exception {
// setup ...
CreateTransportOrderVO vo = createTO();
postTOAndValidate(vo, NOTLOGGED);
given(commonGateway.getTransportUnit(KNOWN)).willReturn(Optional.of(new TransportUnit(KNOWN, INIT_LOC, ERR_LOC_STRING)));
vo.setState(TransportOrderState.ONFAILURE.toString());
mockMvc.perform(
patch(TMSConstants.ROOT_ENTITIES)
.contentType(MediaType.APPLICATION_JSON)
.content(objectMapper.writeValueAsString(vo))
)
.andExpect(status().isNoContent())
;
// test ...
vo.setState(TransportOrderState.CANCELED.toString());
mockMvc.perform(
patch(TMSConstants.ROOT_ENTITIES)
.contentType(MediaType.APPLICATION_JSON)
.content(objectMapper.writeValueAsString(vo))
)
.andExpect(status().isBadRequest())
.andDo(document("to-patch-state-change-an-onfailure"))
;
}
public
@Test
void changingAnCanceledOne() throws Exception {
// setup ...
CreateTransportOrderVO vo = createTO();
postTOAndValidate(vo, NOTLOGGED);
given(commonGateway.getTransportUnit(KNOWN)).willReturn(Optional.of(new TransportUnit(KNOWN, INIT_LOC, ERR_LOC_STRING)));
vo.setState(TransportOrderState.CANCELED.toString());
mockMvc.perform(
patch(TMSConstants.ROOT_ENTITIES)
.contentType(MediaType.APPLICATION_JSON)
.content(objectMapper.writeValueAsString(vo))
)
.andExpect(status().isNoContent())
;
// test ...
vo.setState(TransportOrderState.ONFAILURE.toString());
mockMvc.perform(
patch(TMSConstants.ROOT_ENTITIES)
.contentType(MediaType.APPLICATION_JSON)
.content(objectMapper.writeValueAsString(vo))
)
.andExpect(status().isBadRequest())
.andDo(document("to-patch-state-change-a-canceled"))
;
}
}
|
package com.moilioncircle.redis.cluster.watchdog.storage;
import com.moilioncircle.redis.cluster.watchdog.util.Tuples;
import com.moilioncircle.redis.cluster.watchdog.util.type.Tuple2;
import com.moilioncircle.redis.replicator.*;
import com.moilioncircle.redis.replicator.cmd.Command;
import com.moilioncircle.redis.replicator.cmd.CommandListener;
import com.moilioncircle.redis.replicator.cmd.CommandName;
import com.moilioncircle.redis.replicator.cmd.CommandParser;
import com.moilioncircle.redis.replicator.event.Event;
import com.moilioncircle.redis.replicator.io.RedisInputStream;
import com.moilioncircle.redis.replicator.rdb.BaseRdbParser;
import com.moilioncircle.redis.replicator.rdb.DefaultRdbVisitor;
import com.moilioncircle.redis.replicator.rdb.RdbListener;
import com.moilioncircle.redis.replicator.rdb.RdbVisitor;
import com.moilioncircle.redis.replicator.rdb.datatype.*;
import com.moilioncircle.redis.replicator.rdb.module.ModuleParser;
import com.moilioncircle.redis.replicator.util.ByteArray;
import com.moilioncircle.redis.replicator.util.ByteArrayMap;
import java.io.ByteArrayInputStream;
import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import static com.moilioncircle.redis.cluster.watchdog.ClusterConstants.CLUSTER_SLOTS;
import static com.moilioncircle.redis.replicator.Constants.*;
import static java.nio.charset.StandardCharsets.UTF_8;
/**
* @author Leon Chen
* @since 1.0.0
*/
public class RedisStorageEngine implements StorageEngine {
private volatile boolean readonly;
private AtomicLong size = new AtomicLong(0);
private ConcurrentHashMap<Key, Tuple2<Long, Object>>[] slots;
public RedisStorageEngine() {
this.slots = new ConcurrentHashMap[CLUSTER_SLOTS];
for (int i = 0; i < CLUSTER_SLOTS; i++) slots[i] = new ConcurrentHashMap<>();
}
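// Note (added, hedged): entries are partitioned into CLUSTER_SLOTS (16384) maps, indexed
// by StorageEngine.keyHashSlot(key) -- the standard Redis Cluster hash slot, i.e. CRC16
// of the key (or of its {hash tag}, if present) modulo 16384.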
@Override
public void start() {
}
@Override
public void stop() {
stop(0, TimeUnit.MILLISECONDS);
}
@Override
public void stop(long timeout, TimeUnit unit) {
clear();
}
@Override
public long size() {
return size.get();
}
@Override
public synchronized void clear() {
for (int i = 0; i < CLUSTER_SLOTS; i++) {
clear(i);
}
assert 0 == size.get();
}
@Override
public long size(int slot) {
return slots[slot].size();
}
@Override
public synchronized void clear(int slot) {
int size = slots[slot].size();
slots[slot].clear();
this.size.addAndGet(-size);
}
@Override
public Iterator<byte[]> keys() {
return new Iter();
}
@Override
public Iterator<byte[]> keys(int slot) {
return new SlotIter(slot);
}
@Override
public void delete(byte[] key) {
if (slots[StorageEngine.keyHashSlot(key)].remove(new Key(key)) != null) {
size.decrementAndGet();
}
}
@Override
public Object load(byte[] key) {
return slots[StorageEngine.keyHashSlot(key)].compute(new Key(key), (k, v) -> {
if (v == null || (v.getV1() != 0L && v.getV1() < System.currentTimeMillis())) return Tuples.of(0L, null); else return v;
}).getV2();
}
@Override
public boolean exist(byte[] key) {
return slots[StorageEngine.keyHashSlot(key)].containsKey(new Key(key));
}
@Override
public Class<?> type(byte[] key) {
throw new UnsupportedOperationException();
}
@Override
public void save(byte[] key, Object value, long expire, boolean force) {
slots[StorageEngine.keyHashSlot(key)].compute(new Key(key), (k, v) -> {
if (v == null) {
size.incrementAndGet();
return Tuples.of(expire, value);
} else if (!force) {
return v;
} else {
return Tuples.of(expire, value);
}
});
}
@Override
public byte[] dump(byte[] key) {
throw new UnsupportedOperationException();
}
@Override
public void restore(byte[] key, byte[] serialized, long expire, boolean force) {
Replicator replicator = new RestoreReplicator(new ByteArrayInputStream(serialized), Configuration.defaultSetting());
replicator.addRdbListener(new RdbListener.Adaptor() {
@Override
public void handle(Replicator replicator, KeyValuePair<?> kv) {
save(key, kv.getValue(), expire, force);
}
});
try { replicator.open(); } catch (IOException e) { /* payload comes from an in-memory byte array; errors are intentionally ignored here */ }
}
@Override
public boolean readonly() {
return this.readonly;
}
@Override
public void readonly(boolean r) {
this.readonly = r;
}
private class Key {
private final byte[] key;
private Key(final byte[] key) {
this.key = key;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Key key1 = (Key) o;
return Arrays.equals(key, key1.key);
}
@Override
public int hashCode() {
return Arrays.hashCode(key);
}
}
private class SlotIter implements Iterator<byte[]> {
private Iterator<Key> curr;
private SlotIter(int slot) {
this.curr = slots[slot].keySet().iterator();
}
@Override
public boolean hasNext() {
return curr.hasNext();
}
@Override
public byte[] next() {
return curr.next().key;
}
}
private class Iter implements Iterator<byte[]> {
private int idx = 0;
private Iterator<Key> curr;
private Iter() {
this.curr = slots[idx].keySet().iterator();
}
@Override
public boolean hasNext() {
while (true) {
if (curr.hasNext()) return true;
else if (idx + 1 < CLUSTER_SLOTS) {
idx++;
curr = slots[idx].keySet().iterator();
continue;
} else {
return false;
}
}
}
@Override
public byte[] next() {
return curr.next().key;
}
}
public static class RestoreReplicator extends AbstractReplicator {
public RestoreReplicator(InputStream in, Configuration configuration) {
Objects.requireNonNull(in);
Objects.requireNonNull(configuration);
this.configuration = configuration;
this.inputStream = new RedisInputStream(in, this.configuration.getBufferSize());
this.inputStream.setRawByteListeners(this.rawByteListeners);
if (configuration.isUseDefaultExceptionListener())
addExceptionListener(new DefaultExceptionListener());
}
@Override
public void open() throws IOException {
try {
doOpen();
} catch (EOFException ignore) {
} catch (UncheckedIOException e) {
if (!(e.getCause() instanceof EOFException)) throw e;
} finally {
close();
}
}
protected void doOpen() throws IOException {
  // A DUMP/RESTORE payload is: one value-type byte, the serialized value, then a
  // 2-byte RDB version and an 8-byte CRC64 checksum footer.
  RestoreRdbVisitor v = new RestoreRdbVisitor(this);
  submitEvent(v.rdbLoadObject(this.inputStream, null, v.applyType(this.inputStream), 8));
  int version = this.inputStream.readInt(2);    // RDB version footer (not checked here)
  long checksum = this.inputStream.readLong(8); // CRC64 footer (not verified here)
}
@Override
public void close() throws IOException {
doClose();
}
@Override
public void setRdbVisitor(RdbVisitor rdbVisitor) {
throw new UnsupportedOperationException();
}
@Override
public boolean addCommandListener(CommandListener listener) {
throw new UnsupportedOperationException();
}
@Override
public boolean removeCommandListener(CommandListener listener) {
throw new UnsupportedOperationException();
}
@Override
public CommandParser<? extends Command> getCommandParser(CommandName command) {
throw new UnsupportedOperationException();
}
@Override
public <T extends Command> void addCommandParser(CommandName command, CommandParser<T> parser) {
throw new UnsupportedOperationException();
}
@Override
public CommandParser<? extends Command> removeCommandParser(CommandName command) {
throw new UnsupportedOperationException();
}
@Override
public void builtInCommandParserRegister() {
throw new UnsupportedOperationException();
}
private static class RestoreRdbVisitor extends DefaultRdbVisitor {
@Override
public Event applyString(RedisInputStream in, DB db, int version) throws IOException {
BaseRdbParser parser = new BaseRdbParser(in);
KeyStringValueString o0 = new KeyStringValueString();
byte[] val = parser.rdbLoadEncodedStringObject().first();
o0.setValueRdbType(RDB_TYPE_STRING);
o0.setValue(new String(val, UTF_8));
o0.setRawValue(val);
o0.setDb(db);
return o0;
}
@Override
public Event applyList(RedisInputStream in, DB db, int version) throws IOException {
BaseRdbParser parser = new BaseRdbParser(in);
KeyStringValueList o1 = new KeyStringValueList();
long len = parser.rdbLoadLen().len;
List<String> list = new ArrayList<>();
List<byte[]> rawList = new ArrayList<>();
for (int i = 0; i < len; i++) {
byte[] element = parser.rdbLoadEncodedStringObject().first();
list.add(new String(element, UTF_8));
rawList.add(element);
}
o1.setValueRdbType(RDB_TYPE_LIST);
o1.setValue(list);
o1.setRawValue(rawList);
o1.setDb(db);
return o1;
}
@Override
public Event applySet(RedisInputStream in, DB db, int version) throws IOException {
BaseRdbParser parser = new BaseRdbParser(in);
KeyStringValueSet o2 = new KeyStringValueSet();
long len = parser.rdbLoadLen().len;
Set<String> set = new LinkedHashSet<>();
Set<byte[]> rawSet = new LinkedHashSet<>();
for (int i = 0; i < len; i++) {
byte[] element = parser.rdbLoadEncodedStringObject().first();
set.add(new String(element, UTF_8));
rawSet.add(element);
}
o2.setValueRdbType(RDB_TYPE_SET);
o2.setValue(set);
o2.setRawValue(rawSet);
o2.setDb(db);
return o2;
}
@Override
public Event applyZSet(RedisInputStream in, DB db, int version) throws IOException {
BaseRdbParser parser = new BaseRdbParser(in);
KeyStringValueZSet o3 = new KeyStringValueZSet();
long len = parser.rdbLoadLen().len;
Set<ZSetEntry> zset = new LinkedHashSet<>();
while (len > 0) {
byte[] element = parser.rdbLoadEncodedStringObject().first();
double score = parser.rdbLoadDoubleValue();
zset.add(new ZSetEntry(new String(element, UTF_8), score, element));
len--;
}
o3.setValueRdbType(RDB_TYPE_ZSET);
o3.setValue(zset);
o3.setDb(db);
return o3;
}
@Override
public Event applyZSet2(RedisInputStream in, DB db, int version) throws IOException {
BaseRdbParser parser = new BaseRdbParser(in);
KeyStringValueZSet o5 = new KeyStringValueZSet();
long len = parser.rdbLoadLen().len;
Set<ZSetEntry> zset = new LinkedHashSet<>();
while (len > 0) {
byte[] element = parser.rdbLoadEncodedStringObject().first();
double score = parser.rdbLoadBinaryDoubleValue();
zset.add(new ZSetEntry(new String(element, UTF_8), score, element));
len--;
}
o5.setValueRdbType(RDB_TYPE_ZSET_2);
o5.setValue(zset);
o5.setDb(db);
return o5;
}
@Override
public Event applyHash(RedisInputStream in, DB db, int version) throws IOException {
BaseRdbParser parser = new BaseRdbParser(in);
KeyStringValueHash o4 = new KeyStringValueHash();
long len = parser.rdbLoadLen().len;
Map<String, String> map = new LinkedHashMap<>();
ByteArrayMap<byte[]> rawMap = new ByteArrayMap<>();
while (len > 0) {
byte[] field = parser.rdbLoadEncodedStringObject().first();
byte[] value = parser.rdbLoadEncodedStringObject().first();
map.put(new String(field, UTF_8), new String(value, UTF_8));
rawMap.put(field, value);
len--;
}
o4.setValueRdbType(RDB_TYPE_HASH);
o4.setValue(map);
o4.setRawValue(rawMap);
o4.setDb(db);
return o4;
}
@Override
public Event applyHashZipMap(RedisInputStream in, DB db, int version) throws IOException {
BaseRdbParser parser = new BaseRdbParser(in);
KeyStringValueHash o9 = new KeyStringValueHash();
ByteArray aux = parser.rdbLoadPlainStringObject();
RedisInputStream stream = new RedisInputStream(new com.moilioncircle.redis.replicator.io.ByteArrayInputStream(aux));
Map<String, String> map = new LinkedHashMap<>();
ByteArrayMap<byte[]> rawMap = new ByteArrayMap<>();
BaseRdbParser.LenHelper.zmlen(stream); // zmlen
while (true) {
int zmEleLen = BaseRdbParser.LenHelper.zmElementLen(stream);
if (zmEleLen == 255) {
o9.setValueRdbType(RDB_TYPE_HASH_ZIPMAP);
o9.setValue(map);
o9.setRawValue(rawMap);
o9.setDb(db);
return o9;
}
byte[] field = BaseRdbParser.StringHelper.bytes(stream, zmEleLen);
zmEleLen = BaseRdbParser.LenHelper.zmElementLen(stream);
if (zmEleLen == 255) {
//value is null
map.put(new String(field, UTF_8), null);
rawMap.put(field, null);
o9.setValueRdbType(RDB_TYPE_HASH_ZIPMAP);
o9.setValue(map);
o9.setRawValue(rawMap);
o9.setDb(db);
return o9;
}
int free = BaseRdbParser.LenHelper.free(stream);
byte[] value = BaseRdbParser.StringHelper.bytes(stream, zmEleLen);
BaseRdbParser.StringHelper.skip(stream, free);
map.put(new String(field, UTF_8), new String(value, UTF_8));
rawMap.put(field, value);
}
}
@Override
public Event applyListZipList(RedisInputStream in, DB db, int version) throws IOException {
BaseRdbParser parser = new BaseRdbParser(in);
KeyStringValueList o10 = new KeyStringValueList();
ByteArray aux = parser.rdbLoadPlainStringObject();
RedisInputStream stream = new RedisInputStream(new com.moilioncircle.redis.replicator.io.ByteArrayInputStream(aux));
List<String> list = new ArrayList<>();
List<byte[]> rawList = new ArrayList<>();
BaseRdbParser.LenHelper.zlbytes(stream); // zlbytes
BaseRdbParser.LenHelper.zltail(stream); // zltail
int zllen = BaseRdbParser.LenHelper.zllen(stream);
for (int i = 0; i < zllen; i++) {
byte[] e = BaseRdbParser.StringHelper.zipListEntry(stream);
list.add(new String(e, UTF_8));
rawList.add(e);
}
int zlend = BaseRdbParser.LenHelper.zlend(stream);
if (zlend != 255) {
throw new AssertionError("zlend expect 255 but " + zlend);
}
o10.setValueRdbType(RDB_TYPE_LIST_ZIPLIST);
o10.setValue(list);
o10.setRawValue(rawList);
o10.setDb(db);
return o10;
}
@Override
public Event applySetIntSet(RedisInputStream in, DB db, int version) throws IOException {
BaseRdbParser parser = new BaseRdbParser(in);
KeyStringValueSet o11 = new KeyStringValueSet();
ByteArray aux = parser.rdbLoadPlainStringObject();
RedisInputStream stream = new RedisInputStream(new com.moilioncircle.redis.replicator.io.ByteArrayInputStream(aux));
Set<String> set = new LinkedHashSet<>();
Set<byte[]> rawSet = new LinkedHashSet<>();
int encoding = BaseRdbParser.LenHelper.encoding(stream);
long lenOfContent = BaseRdbParser.LenHelper.lenOfContent(stream);
for (long i = 0; i < lenOfContent; i++) {
switch (encoding) {
case 2:
String element = String.valueOf(stream.readInt(2));
set.add(element);
rawSet.add(element.getBytes());
break;
case 4:
element = String.valueOf(stream.readInt(4));
set.add(element);
rawSet.add(element.getBytes());
break;
case 8:
element = String.valueOf(stream.readLong(8));
set.add(element);
rawSet.add(element.getBytes());
break;
default:
throw new AssertionError("expect encoding [2,4,8] but:" + encoding);
}
}
o11.setValueRdbType(RDB_TYPE_SET_INTSET);
o11.setValue(set);
o11.setRawValue(rawSet);
o11.setDb(db);
return o11;
}
@Override
public Event applyZSetZipList(RedisInputStream in, DB db, int version) throws IOException {
BaseRdbParser parser = new BaseRdbParser(in);
KeyStringValueZSet o12 = new KeyStringValueZSet();
ByteArray aux = parser.rdbLoadPlainStringObject();
RedisInputStream stream = new RedisInputStream(new com.moilioncircle.redis.replicator.io.ByteArrayInputStream(aux));
Set<ZSetEntry> zset = new LinkedHashSet<>();
BaseRdbParser.LenHelper.zlbytes(stream); // zlbytes
BaseRdbParser.LenHelper.zltail(stream); // zltail
int zllen = BaseRdbParser.LenHelper.zllen(stream);
while (zllen > 0) {
byte[] element = BaseRdbParser.StringHelper.zipListEntry(stream);
zllen--;
double score = Double.valueOf(new String(BaseRdbParser.StringHelper.zipListEntry(stream), UTF_8));
zllen--;
zset.add(new ZSetEntry(new String(element, UTF_8), score, element));
}
int zlend = BaseRdbParser.LenHelper.zlend(stream);
if (zlend != 255) {
throw new AssertionError("zlend expect 255 but " + zlend);
}
o12.setValueRdbType(RDB_TYPE_ZSET_ZIPLIST);
o12.setValue(zset);
o12.setDb(db);
return o12;
}
@Override
public Event applyHashZipList(RedisInputStream in, DB db, int version) throws IOException {
BaseRdbParser parser = new BaseRdbParser(in);
KeyStringValueHash o13 = new KeyStringValueHash();
ByteArray aux = parser.rdbLoadPlainStringObject();
RedisInputStream stream = new RedisInputStream(new com.moilioncircle.redis.replicator.io.ByteArrayInputStream(aux));
Map<String, String> map = new LinkedHashMap<>();
ByteArrayMap<byte[]> rawMap = new ByteArrayMap<>();
BaseRdbParser.LenHelper.zlbytes(stream); // zlbytes
BaseRdbParser.LenHelper.zltail(stream); // zltail
int zllen = BaseRdbParser.LenHelper.zllen(stream);
while (zllen > 0) {
byte[] field = BaseRdbParser.StringHelper.zipListEntry(stream);
zllen--;
byte[] value = BaseRdbParser.StringHelper.zipListEntry(stream);
zllen--;
map.put(new String(field, UTF_8), new String(value, UTF_8));
rawMap.put(field, value);
}
int zlend = BaseRdbParser.LenHelper.zlend(stream);
if (zlend != 255) {
throw new AssertionError("zlend expect 255 but " + zlend);
}
o13.setValueRdbType(RDB_TYPE_HASH_ZIPLIST);
o13.setValue(map);
o13.setRawValue(rawMap);
o13.setDb(db);
return o13;
}
@Override
public Event applyListQuickList(RedisInputStream in, DB db, int version) throws IOException {
BaseRdbParser parser = new BaseRdbParser(in);
KeyStringValueList o14 = new KeyStringValueList();
long len = parser.rdbLoadLen().len;
List<String> list = new ArrayList<>();
List<byte[]> rawList = new ArrayList<>();
for (int i = 0; i < len; i++) {
ByteArray element = parser.rdbGenericLoadStringObject(RDB_LOAD_NONE);
RedisInputStream stream = new RedisInputStream(new com.moilioncircle.redis.replicator.io.ByteArrayInputStream(element));
BaseRdbParser.LenHelper.zlbytes(stream); // zlbytes
BaseRdbParser.LenHelper.zltail(stream); // zltail
int zllen = BaseRdbParser.LenHelper.zllen(stream);
for (int j = 0; j < zllen; j++) {
byte[] e = BaseRdbParser.StringHelper.zipListEntry(stream);
list.add(new String(e, UTF_8));
rawList.add(e);
}
int zlend = BaseRdbParser.LenHelper.zlend(stream);
if (zlend != 255) {
throw new AssertionError("zlend expect 255 but " + zlend);
}
}
o14.setValueRdbType(RDB_TYPE_LIST_QUICKLIST);
o14.setValue(list);
o14.setRawValue(rawList);
o14.setDb(db);
return o14;
}
@Override
public Event applyModule(RedisInputStream in, DB db, int version) throws IOException {
BaseRdbParser parser = new BaseRdbParser(in);
KeyStringValueModule o6 = new KeyStringValueModule();
char[] c = new char[9];
long moduleid = parser.rdbLoadLen().len;
for (int i = 0; i < c.length; i++) {
c[i] = MODULE_SET[(int) (moduleid >>> (10 + (c.length - 1 - i) * 6) & 63)];
}
String moduleName = new String(c);
int moduleVersion = (int) (moduleid & 1023);
ModuleParser<? extends Module> moduleParser = lookupModuleParser(moduleName, moduleVersion);
if (moduleParser == null) {
throw new NoSuchElementException("module[" + moduleName + "," + moduleVersion + "] not exist.");
}
o6.setValueRdbType(RDB_TYPE_MODULE);
o6.setValue(moduleParser.parse(in, 1));
o6.setDb(db);
return o6;
}
@Override
public Event applyModule2(RedisInputStream in, DB db, int version) throws IOException {
BaseRdbParser parser = new BaseRdbParser(in);
KeyStringValueModule o7 = new KeyStringValueModule();
char[] c = new char[9];
long moduleid = parser.rdbLoadLen().len;
for (int i = 0; i < c.length; i++) {
c[i] = MODULE_SET[(int) (moduleid >>> (10 + (c.length - 1 - i) * 6) & 63)];
}
String moduleName = new String(c);
int moduleVersion = (int) (moduleid & 1023);
ModuleParser<? extends Module> moduleParser = lookupModuleParser(moduleName, moduleVersion);
if (moduleParser == null) {
throw new NoSuchElementException("module[" + moduleName + "," + moduleVersion + "] not exist.");
}
o7.setValueRdbType(RDB_TYPE_MODULE_2);
o7.setValue(moduleParser.parse(in, 2));
o7.setDb(db);
long eof = parser.rdbLoadLen().len;
if (eof != RDB_MODULE_OPCODE_EOF) {
throw new UnsupportedOperationException("The RDB file contains module data for the module '" + moduleName + "' that is not terminated by the proper module value EOF marker");
}
return o7;
}
public RestoreRdbVisitor(Replicator replicator) {
super(replicator);
}
public KeyValuePair<?> rdbLoadObject(RedisInputStream in, DB db, int valueType, int version) throws IOException {
return super.rdbLoadObject(in, db, valueType, version);
}
}
}
}
|
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package gitgraph;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
*
* @author pauldoo
*/
public final class Main {
private static final boolean includeBlobs = true;
private static final Pattern objectInRevList = Pattern.compile("^([0123456789abcdef]{40})( (.*))?$");
private static final Pattern parentInCommit = Pattern.compile("^parent ([0123456789abcdef]{40})$");
private static final Pattern treeInCommit = Pattern.compile("^tree ([0123456789abcdef]{40})$");
private static final Pattern treeInTree = Pattern.compile("^[0123456789]{6} tree ([0123456789abcdef]{40}).*$");
private static final Pattern blobInTree = Pattern.compile("^[0123456789]{6} blob ([0123456789abcdef]{40}).*$");
/**
* @param args the command line arguments
*/
public static void main(String[] args) {
final String outputFilename = args[0];
Process process = null;
try {
{
String command[] = new String[]{"git", "rev-list", "--objects", "--all"};
process = Runtime.getRuntime().exec(command);
process.getOutputStream().close();
process.getErrorStream().close();
}
BufferedReader r = new BufferedReader(new InputStreamReader(new BufferedInputStream(process.getInputStream())));
PrintWriter dotOutput = new PrintWriter(new BufferedOutputStream(new FileOutputStream(outputFilename)));
dotOutput.println("strict digraph {");
{
String line;
while ((line = r.readLine()) != null) {
Matcher m = objectInRevList.matcher(line);
if (m.matches()) {
String fullHash = m.group(1);
String hintName = m.group(3); // null when rev-list prints no path hint for the object
processObject(fullHash, hintName, dotOutput);
} else {
System.err.println("Warning: I didn't understand '" + line + "'");
}
}
}
dotOutput.println("}");
dotOutput.close();
if (dotOutput.checkError()) {
throw new IOException("Error writing output file.");
}
process.waitFor();
process = null;
} catch (Exception ex) {
ex.printStackTrace();
} finally {
if (process != null) {
process.destroy();
}
}
}
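// Usage sketch (added, hedged): run from inside a git working copy; the program shells
// out to "git rev-list --objects --all" and writes a Graphviz dot file to args[0].
// Assuming Graphviz is installed, the graph can then be rendered with, e.g.:
//   java -cp . gitgraph.Main repo-objects.dot
//   dot -Tsvg repo-objects.dot -o repo-objects.svg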
private static void processObject(String objectHash, String hintName, PrintWriter dotOutput) throws Exception
{
Process process = null;
try {
{
String command[] = new String[]{"git", "cat-file", "-t", objectHash};
process = Runtime.getRuntime().exec(command);
process.getOutputStream().close();
process.getErrorStream().close();
}
BufferedReader r = new BufferedReader(new InputStreamReader(new BufferedInputStream(process.getInputStream())));
String type = r.readLine();
process.waitFor();
process = null;
if ("commit".equals(type)) {
processCommit(objectHash, dotOutput);
} else if ("tree".equals(type)) {
processTree(objectHash, hintName, dotOutput);
} else if (includeBlobs && "blob".equals(type)) {
processBlob(objectHash, hintName, dotOutput);
} else {
System.err.println("Warning: Did not recognise object type '" + type + "'");
}
} finally {
if (process != null) {
process.destroy();
}
}
}
private static void processCommit(String hash, PrintWriter dotOutput) throws Exception
{
Process process = null;
try {
{
String command[] = new String[]{"git", "cat-file", "-p", hash};
process = Runtime.getRuntime().exec(command);
process.getOutputStream().close();
process.getErrorStream().close();
}
BufferedReader commitReader = new BufferedReader(new InputStreamReader(new BufferedInputStream(process.getInputStream())));
String line;
while ((line = commitReader.readLine()) != null && !line.isEmpty()) {
Matcher parent = parentInCommit.matcher(line);
if (parent.matches()) {
dotOutput.println(" \"" + hash + "\" -> \"" + parent.group(1) + "\";");
}
Matcher tree = treeInCommit.matcher(line);
if (tree.matches()) {
dotOutput.println(" \"" + hash + "\" -> \"" + tree.group(1) + "\";");
}
}
String firstLineOfCommitMessage = commitReader.readLine();
if (firstLineOfCommitMessage == null) {
firstLineOfCommitMessage = "";
}
dotOutput.println(" \"" + hash + "\" [label=\"" + hash.substring(0, 7) + "\\n" + firstLineOfCommitMessage + "\",shape=ellipse];");
process.waitFor();
process = null;
} finally {
if (process != null) {
process.destroy();
}
}
}
private static void processTree(String hash, String hintName, PrintWriter dotOutput) throws Exception
{
dotOutput.println(" \"" + hash + "\" [label=\"" + hash.substring(0, 7) + "\\n" + hintName + "\",shape=triangle];");
Process process = null;
try {
{
String command[] = new String[]{"git", "cat-file", "-p", hash};
process = Runtime.getRuntime().exec(command);
process.getOutputStream().close();
process.getErrorStream().close();
}
BufferedReader treeReader = new BufferedReader(new InputStreamReader(new BufferedInputStream(process.getInputStream())));
String line;
while ((line = treeReader.readLine()) != null) {
Matcher tree = treeInTree.matcher(line);
if (tree.matches()) {
dotOutput.println(" \"" + hash + "\" -> \"" + tree.group(1) + "\";");
}
Matcher blob = blobInTree.matcher(line);
if (includeBlobs && blob.matches()) {
dotOutput.println(" \"" + hash + "\" -> \"" + blob.group(1) + "\";");
}
}
process.waitFor();
process = null;
} finally {
if (process != null) {
process.destroy();
}
}
}
private static void processBlob(String hash, String hintName, PrintWriter dotOutput)
{
dotOutput.println(" \"" + hash + "\" [label=\"" + hash.substring(0, 7) + "\\n" + hintName + "\",shape=rectangle];");
}
}
|
package hudson.model;
import hudson.util.StreamTaskListener;
import hudson.util.NullStream;
import hudson.util.FormValidation;
import hudson.Launcher;
import hudson.Extension;
import hudson.EnvVars;
import hudson.slaves.NodeSpecific;
import hudson.tools.ToolInstallation;
import hudson.tools.ToolDescriptor;
import hudson.tools.ToolProperty;
import hudson.tools.JDKInstaller;
import java.io.File;
import java.io.IOException;
import java.util.Map;
import java.util.List;
import java.util.Arrays;
import java.util.Collections;
import org.kohsuke.stapler.DataBoundConstructor;
import org.kohsuke.stapler.QueryParameter;
/**
* Information about JDK installation.
*
* @author Kohsuke Kawaguchi
*/
public final class JDK extends ToolInstallation implements NodeSpecific<JDK>, EnvironmentSpecific<JDK> {
@Deprecated // kept for backward compatibility - use getHome() instead
private String javaHome;
public JDK(String name, String javaHome) {
super(name, javaHome, Collections.<ToolProperty<?>>emptyList());
}
@DataBoundConstructor
public JDK(String name, String home, List<? extends ToolProperty<?>> properties) {
super(name, home, properties);
}
/**
* install directory.
*
* @deprecated as of 1.304
* Use {@link #getHome()}
*/
public String getJavaHome() {
return getHome();
}
@SuppressWarnings({"deprecation"})
public @Override String getHome() {
if (javaHome != null) return javaHome;
return super.getHome();
}
/**
* Gets the path to the bin directory.
*/
public File getBinDir() {
return new File(getHome(),"bin");
}
/**
* Gets the path to 'java'.
*/
private File getExecutable() {
String execName;
if(File.separatorChar=='\\')
execName = "java.exe";
else
execName = "java";
return new File(getHome(),"bin/"+execName);
}
/**
* Returns true if the executable exists.
*/
public boolean getExists() {
return getExecutable().exists();
}
/**
* Sets PATH and JAVA_HOME from this JDK.
*/
public void buildEnvVars(Map<String,String> env) {
// see EnvVars javadoc for why this adds PATH.
env.put("PATH+JDK",getHome()+"/bin");
env.put("JAVA_HOME",getHome());
}
public JDK forNode(Node node, TaskListener log) throws IOException, InterruptedException {
return new JDK(getName(), translateFor(node, log));
}
public JDK forEnvironment(EnvVars environment) {
return new JDK(getName(), environment.expand(getHome()));
}
/**
* Checks if "java" is in PATH on the given node.
*
* <p>
* If it's not, then the user must specify a configured JDK,
* so this is often useful for form field validation.
*/
public static boolean isDefaultJDKValid(Node n) {
try {
TaskListener listener = new StreamTaskListener(new NullStream());
Launcher launcher = n.createLauncher(listener);
return launcher.launch().cmds("java","-fullversion").stdout(listener).join()==0;
} catch (IOException e) {
return false;
} catch (InterruptedException e) {
return false;
}
}
@Extension
public static class DescriptorImpl extends ToolDescriptor<JDK> {
public String getDisplayName() {
return "JDK"; // XXX I18N
}
public @Override JDK[] getInstallations() {
return Hudson.getInstance().getJDKs().toArray(new JDK[0]);
}
// this isn't really synchronized well since the list is Hudson.jdks :(
public @Override synchronized void setInstallations(JDK... jdks) {
List<JDK> list = Hudson.getInstance().getJDKs();
list.clear();
list.addAll(Arrays.asList(jdks));
}
@Override
public List<JDKInstaller> getDefaultInstallers() {
return Collections.singletonList(new JDKInstaller(null,false));
}
/**
* Checks if the JAVA_HOME is a valid JAVA_HOME path.
*/
public FormValidation doCheckHome(@QueryParameter File value) {
// this can be used to check the existence of a file on the server, so needs to be protected
Hudson.getInstance().checkPermission(Hudson.ADMINISTER);
if(value.getPath().equals(""))
return FormValidation.ok();
if(!value.isDirectory())
return FormValidation.error(Messages.Hudson_NotADirectory(value));
File toolsJar = new File(value,"lib/tools.jar");
File mac = new File(value,"lib/dt.jar");
if(!toolsJar.exists() && !mac.exists())
return FormValidation.error(Messages.Hudson_NotJDKDir(value));
return FormValidation.ok();
}
}
}
|
package hex.deeplearning;
import hex.*;
import static hex.deeplearning.DeepLearning.makeDataInfo;
import hex.quantile.Quantile;
import hex.quantile.QuantileModel;
import hex.schemas.DeepLearningModelV3;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
import water.*;
import water.api.ModelSchema;
import water.exceptions.H2OIllegalArgumentException;
import water.fvec.Chunk;
import water.fvec.Frame;
import water.fvec.Vec;
import water.util.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Random;
import static hex.ModelMetrics.calcVarImp;
import static java.lang.Double.isNaN;
/**
* The Deep Learning model
* It contains a DeepLearningModelInfo with the most up-to-date model,
* a scoring history, as well as some helpers to indicate the progress
*/
public class DeepLearningModel extends SupervisedModel<DeepLearningModel,DeepLearningModel.DeepLearningParameters,DeepLearningModel.DeepLearningModelOutput> implements Model.DeepFeatures {
public static class DeepLearningParameters extends SupervisedModel.SupervisedParameters {
@Override public double missingColumnsType() { return _sparse ? 0 : Double.NaN; }
@Override protected boolean defaultDropConsCols() { return true; /*FIXME: should be _ignore_const_cols; */ }
// public int _n_folds;
public int getNumFolds() { return 0; }
public boolean _keep_cross_validation_splits;
/**
* A model key associated with a previously trained Deep Learning
* model. This option allows users to build a new model as a
* continuation of a previously generated model.
*/
public Key _checkpoint;
/**
* If enabled, store the best model under the destination key of this model at the end of training.
* Only applicable if training is not cancelled.
*/
public boolean _overwrite_with_best_model = true;
public boolean _autoencoder = false;
public boolean _use_all_factor_levels = true;
/*Neural Net Topology*/
/**
* The activation function (non-linearity) to be used the neurons in the hidden layers.
* Tanh: Hyperbolic tangent function (same as scaled and shifted sigmoid).
* Rectifier: Chooses the maximum of (0, x) where x is the input value.
* Maxout: Choose the maximum coordinate of the input vector.
* With Dropout: Zero out a random user-given fraction of the
* incoming weights to each hidden layer during training, for each
* training row. This effectively trains exponentially many models at
* once, and can improve generalization.
*/
public Activation _activation = Activation.Rectifier;
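// Hedged illustration (added; not part of the original API): the per-unit effect of the
// non-Maxout activations on a single pre-activation value x. Maxout instead takes the
// maximum over a group of linear units, so it has no per-unit closed form here.
private static double illustrateActivation(Activation a, double x) {
  switch (a) {
    case Tanh:
    case TanhWithDropout:
      return Math.tanh(x);
    case Rectifier:
    case RectifierWithDropout:
      return Math.max(0.0, x);
    default:
      return x; // Maxout / MaxoutWithDropout are handled group-wise elsewhere
  }
}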
/**
* The number and size of each hidden layer in the model.
* For example, if a user specifies "100,200,100" a model with 3 hidden
* layers will be produced, and the middle hidden layer will have 200
* neurons.
*/
public int[] _hidden = new int[] { 200, 200 };
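// Example (added): the "100,200,100" case described above corresponds to
//   parms._hidden = new int[]{100, 200, 100};
// i.e. three hidden layers with 100, 200 and 100 neurons respectively (parms being a
// DeepLearningParameters instance).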
/**
* The number of passes over the training dataset to be carried out.
* It is recommended to start with lower values for initial experiments.
* This value can be modified during checkpoint restarts and allows continuation
* of selected models.
*/
public double _epochs = 10;
/**
* The number of training data rows to be processed per iteration. Note that
* independent of this parameter, each row is used immediately to update the model
* with (online) stochastic gradient descent. This parameter controls the
* synchronization period between nodes in a distributed environment and the
* frequency at which scoring and model cancellation can happen. For example, if
* it is set to 10,000 on H2O running on 4 nodes, then each node will
* process 2,500 rows per iteration, sampling randomly from their local data.
* Then, model averaging between the nodes takes place, and scoring can happen
* (dependent on scoring interval and duty factor). Special values are 0 for
* one epoch per iteration, -1 for processing the maximum amount of data
* per iteration (if **replicate training data** is enabled, N epochs
* will be trained per iteration on N nodes, otherwise one epoch). Special value
* of -2 turns on automatic mode (auto-tuning).
*/
public long _train_samples_per_iteration = -2;
public double _target_ratio_comm_to_comp = 0.02;
/**
* The random seed controls sampling and initialization. Reproducible
* results are only expected with single-threaded operation (i.e.,
* when running on one node, turning off load balancing and providing
* a small dataset that fits in one chunk). In general, the
* multi-threaded asynchronous updates to the model parameters will
* result in (intentional) race conditions and non-reproducible
* results. Note that deterministic sampling and initialization might
* still lead to some weak sense of determinism in the model.
*/
public long _seed = RandomUtils.getRNG(System.currentTimeMillis()).nextLong();
/*Adaptive Learning Rate*/
/**
* The implemented adaptive learning rate algorithm (ADADELTA) automatically
* combines the benefits of learning rate annealing and momentum
* training to avoid slow convergence. Specification of only two
* parameters (rho and epsilon) simplifies hyper parameter search.
* In some cases, manually controlled (non-adaptive) learning rate and
* momentum specifications can lead to better results, but require the
* specification (and hyper parameter search) of up to 7 parameters.
* If the model is built on a topology with many local minima or
* long plateaus, it is possible for a constant learning rate to produce
* sub-optimal results. Learning rate annealing allows digging deeper into
* local minima, while rate decay allows specification of different
* learning rates per layer. When the gradient is being estimated in
* a long valley in the optimization landscape, a large learning rate
* can cause the gradient to oscillate and move in the wrong
* direction. When the gradient is computed on a relatively flat
* surface with small learning rates, the model can converge far
* slower than necessary.
*/
public boolean _adaptive_rate = true;
/**
* The first of two hyper parameters for adaptive learning rate (ADADELTA).
* It is similar to momentum and relates to the memory of prior weight updates.
* Typical values are between 0.9 and 0.999.
* This parameter is only active if adaptive learning rate is enabled.
*/
public double _rho = 0.99;
/**
* The second of two hyper parameters for adaptive learning rate (ADADELTA).
* It is similar to learning rate annealing during initial training
* and momentum at later stages where it allows forward progress.
* Typical values are between 1e-10 and 1e-4.
* This parameter is only active if adaptive learning rate is enabled.
*/
public double _epsilon = 1e-8;
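// Illustrative sketch of the ADADELTA update that _rho and _epsilon parameterize, in its
// standard form (Zeiler 2012); the actual per-weight updates are implemented in the Neurons
// code and may differ in detail. The caller is assumed to keep the two running averages
// (squared gradients and squared updates) per coefficient.
private static double exampleAdaDeltaStep(double weight, double gradient,
                                          double[] avgSqGrad, double[] avgSqUpdate, int i,
                                          double rho, double epsilon) {
  avgSqGrad[i] = rho * avgSqGrad[i] + (1 - rho) * gradient * gradient;      // E[g^2]
  double update = -Math.sqrt(avgSqUpdate[i] + epsilon)                       // RMS of past updates
      / Math.sqrt(avgSqGrad[i] + epsilon) * gradient;                        // over RMS of gradients
  avgSqUpdate[i] = rho * avgSqUpdate[i] + (1 - rho) * update * update;       // E[dx^2]
  return weight + update;
}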
/*Learning Rate*/
/**
* When adaptive learning rate is disabled, the magnitude of the weight
* updates is determined by the user-specified learning rate
* (potentially annealed), and is a function of the difference
* between the predicted value and the target value. That difference,
* generally called delta, is only available at the output layer. To
* correct the output at each hidden layer, back propagation is
* used. Momentum modifies back propagation by allowing prior
* iterations to influence the current update. Using the momentum
* parameter can aid in avoiding local minima and the associated
* instability. Too much momentum can lead to instability, which is
* why momentum is best ramped up slowly.
* This parameter is only active if adaptive learning rate is disabled.
*/
public double _rate = .005;
/**
* Learning rate annealing reduces the learning rate to "freeze" into
* local minima in the optimization landscape. The annealing rate is the
* inverse of the number of training samples it takes to cut the learning rate in half
* (e.g., 1e-6 means that it takes 1e6 training samples to halve the learning rate).
* This parameter is only active if adaptive learning rate is disabled.
*/
public double _rate_annealing = 1e-6;
/**
* The learning rate decay parameter controls the change of learning rate across layers.
* For example, assume the rate parameter is set to 0.01, and the rate_decay parameter is set to 0.5.
* Then the learning rate for the weights connecting the input and first hidden layer will be 0.01,
* the learning rate for the weights connecting the first and the second hidden layer will be 0.005,
* and the learning rate for the weights connecting the second and third hidden layer will be 0.0025, etc.
* This parameter is only active if adaptive learning rate is disabled.
*/
public double _rate_decay = 1.0;
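// Illustrative sketch (assumed form, consistent with the two Javadoc blocks above; the
// authoritative per-neuron computation lives with the Neurons code): the effective learning
// rate for the weights feeding hidden layer `layer` (0-based) after `samplesSeen` training
// samples. With rate = 0.01 and rate_decay = 0.5 this gives 0.01, 0.005, 0.0025, ... across
// layers, and annealing halves the rate every 1/rate_annealing training samples.
private static double exampleEffectiveRate(double rate, double rateAnnealing, double rateDecay,
                                           long samplesSeen, int layer) {
  return rate / (1 + rateAnnealing * samplesSeen) * Math.pow(rateDecay, layer);
}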
/*Momentum*/
/**
* The momentum_start parameter controls the amount of momentum at the beginning of training.
* This parameter is only active if adaptive learning rate is disabled.
*/
public double _momentum_start = 0;
/**
* The momentum_ramp parameter controls the amount of learning for which momentum increases
* (assuming momentum_stable is larger than momentum_start). The ramp is measured in the number
* of training samples.
* This parameter is only active if adaptive learning rate is disabled.
*/
public double _momentum_ramp = 1e6;
/**
* The momentum_stable parameter controls the final momentum value reached after momentum_ramp training samples.
* The momentum used for training will remain the same for training beyond reaching that point.
* This parameter is only active if adaptive learning rate is disabled.
*/
public double _momentum_stable = 0;
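// Illustrative sketch (assumed form, matching the momentum Javadoc blocks above): momentum ramps
// linearly from momentum_start to momentum_stable over momentum_ramp training samples and then
// stays at momentum_stable.
private static double exampleMomentum(double momentumStart, double momentumStable,
                                      double momentumRamp, long samplesSeen) {
  if (samplesSeen >= momentumRamp) return momentumStable;
  return momentumStart + (momentumStable - momentumStart) * samplesSeen / momentumRamp;
}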
/**
* The Nesterov accelerated gradient descent method is a modification to
* traditional gradient descent for convex functions. The method relies on
* gradient information at various points to build a polynomial approximation that
* minimizes the residuals in fewer iterations of the descent.
* This parameter is only active if adaptive learning rate is disabled.
*/
public boolean _nesterov_accelerated_gradient = true;
/*Regularization*/
/**
* A fraction of the features for each training row to be omitted from training in order
* to improve generalization (dimension sampling).
*/
public double _input_dropout_ratio = 0.0;
/**
* A fraction of the inputs for each hidden layer to be omitted from training in order
* to improve generalization. Defaults to 0.5 for each hidden layer if omitted.
*/
public double[] _hidden_dropout_ratios;
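// Illustrative sketch of the dropout described above (assumed standard formulation; the actual
// training kernels live with the Neurons code): during training, each activation is
// independently zeroed with probability dropoutRatio, which is what the input/hidden dropout
// ratios control.
private static void exampleApplyDropout(double[] activations, double dropoutRatio, java.util.Random rng) {
  for (int i = 0; i < activations.length; i++)
    if (rng.nextDouble() < dropoutRatio) activations[i] = 0;
}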
/**
* A regularization method that constrains the absolute value of the weights and
* has the net effect of dropping some weights (setting them to zero) from a model
* to reduce complexity and avoid overfitting.
*/
public double _l1 = 0.0;
/**
* A regularization method that constrains the sum of the squared
* weights. This method introduces bias into parameter estimates, but
* frequently produces substantial gains in modeling as estimate variance is
* reduced.
*/
public double _l2 = 0.0;
/**
* A maximum on the sum of the squared incoming weights into
* any one neuron. This tuning parameter is especially useful for unbounded
* activation functions such as Maxout or Rectifier.
*/
public float _max_w2 = Float.POSITIVE_INFINITY;
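// Illustrative sketch of how the three regularizers above act (assumed standard forms; the
// gradient code lives with the Neurons code): _l1 and _l2 add penalty terms to the objective,
// while _max_w2 is a hard constraint that rescales a neuron's incoming weight vector whenever
// its squared norm exceeds the limit.
private static double examplePenalty(double[] weights, double l1, double l2) {
  double penalty = 0;
  for (double w : weights) penalty += l1 * Math.abs(w) + l2 * w * w;
  return penalty;
}
private static void exampleApplyMaxW2(double[] incomingWeights, float maxW2) {
  double sumSq = 0;
  for (double w : incomingWeights) sumSq += w * w;
  if (sumSq > maxW2) {
    double scale = Math.sqrt(maxW2 / sumSq);
    for (int i = 0; i < incomingWeights.length; i++) incomingWeights[i] *= scale;
  }
}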
/*Initialization*/
/**
* The distribution from which initial weights are to be drawn. The default
* option is an optimized initialization that considers the size of the network.
* The "uniform" option uses a uniform distribution with a mean of 0 and a given
* interval. The "normal" option draws weights from the standard normal
* distribution with a mean of 0 and given standard deviation.
*/
public InitialWeightDistribution _initial_weight_distribution = InitialWeightDistribution.UniformAdaptive;
/**
* The scale of the distribution function for Uniform or Normal distributions.
* For Uniform, the values are drawn uniformly from -initial_weight_scale...initial_weight_scale.
* For Normal, the values are drawn from a Normal distribution with a standard deviation of initial_weight_scale.
*/
public double _initial_weight_scale = 1.0;
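// Illustrative sketch of the three initialization choices above, mirroring the core of
// randomizeWeights() further down in this file (which additionally scales the output layer by a
// factor of 4 for classification in the UniformAdaptive case): UniformAdaptive draws from
// +/- sqrt(6 / (fan_in + fan_out)), Uniform from +/- initial_weight_scale, and Normal from a
// Gaussian with standard deviation initial_weight_scale.
private static double exampleInitialWeight(InitialWeightDistribution dist, double scale,
                                           int fanIn, int fanOut, java.util.Random rng) {
  switch (dist) {
    case UniformAdaptive: {
      double range = Math.sqrt(6. / (fanIn + fanOut));
      return -range + rng.nextDouble() * 2 * range;
    }
    case Uniform:
      return -scale + rng.nextDouble() * 2 * scale;
    case Normal:
      return rng.nextGaussian() * scale;
    default:
      throw new IllegalArgumentException("Unknown distribution: " + dist);
  }
}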
/**
* The loss (error) function to be minimized by the model.
* Cross Entropy loss is used when the model output consists of independent
* hypotheses, and the outputs can be interpreted as the probability that each
* hypothesis is true. Cross entropy is the recommended loss function when the
* target values are class labels, and especially for imbalanced data.
* It strongly penalizes error in the prediction of the actual class label.
* Mean Square loss is used when the model outputs are continuous real values, but can
* be used for classification as well (where it emphasizes the error on all
* output classes, not just for the actual class).
*/
public Loss _loss = Loss.Automatic;
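// Illustrative sketch of the per-row losses the enum above selects between (assumed standard
// forms; the actual implementation lives with the Neurons code): CrossEntropy penalizes the
// predicted probability of the actual class, MeanSquare penalizes the squared difference for
// numeric targets. Constant factors (e.g., 1/2 on the squared error) only rescale the gradient.
private static double exampleCrossEntropyLoss(double predictedProbOfActualClass) {
  return -Math.log(Math.max(predictedProbOfActualClass, 1e-15)); // clip to avoid log(0)
}
private static double exampleMeanSquareLoss(double predicted, double actual) {
  double d = predicted - actual;
  return d * d;
}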
/*Scoring*/
/**
* The minimum time (in seconds) to elapse between model scoring. The actual
* interval is determined by the number of training samples per iteration and the scoring duty cycle.
*/
public double _score_interval = 5;
/**
* The number of training dataset points to be used for scoring. Will be
* randomly sampled. Use 0 for selecting the entire training dataset.
*/
public long _score_training_samples = 10000l;
/**
* The number of validation dataset points to be used for scoring. Can be
* randomly sampled or stratified (if "balance classes" is set and "score
* validation sampling" is set to stratify). Use 0 for selecting the entire
* validation dataset.
*/
public long _score_validation_samples = 0l;
/**
* Maximum fraction of wall clock time spent on model scoring on training and validation samples,
* and on diagnostics such as computation of feature importances (i.e., not on training).
*/
public double _score_duty_cycle = 0.1;
/**
* The stopping criterion in terms of classification error (1-accuracy) on the
* training data scoring dataset. When the error is at or below this threshold,
* training stops.
*/
public double _classification_stop = 0;
/**
* The stopping criterion in terms of regression error (MSE) on the training
* data scoring dataset. When the error is at or below this threshold, training
* stops.
*/
public double _regression_stop = 1e-6;
/**
* Enable quiet mode for less output to standard output.
*/
public boolean _quiet_mode = false;
/**
* Method used to sample the validation dataset for scoring, see Score Validation Samples above.
*/
public ClassSamplingMethod _score_validation_sampling = ClassSamplingMethod.Uniform;
/*Misc*/
/**
* Gather diagnostics for hidden layers, such as mean and RMS values of learning
* rate, momentum, weights and biases.
*/
public boolean _diagnostics = true;
/**
* Whether to compute variable importances for input features.
* The implemented method (by Gedeon) considers the weights connecting the
* input features to the first two hidden layers.
*/
public boolean _variable_importances = false;
/**
* Enable fast mode (minor approximation in back-propagation), should not affect results significantly.
*/
public boolean _fast_mode = true;
/**
* Ignore constant training columns (no information can be gained anyway).
*/
public boolean _ignore_const_cols = true;
/**
* Increase training speed on small datasets by splitting the data into many chunks
* to allow utilization of all cores.
*/
public boolean _force_load_balance = true;
/**
* Replicate the entire training dataset onto every node for faster training on small datasets.
*/
public boolean _replicate_training_data = true;
/**
* Run on a single node for fine-tuning of model parameters. Can be useful for
* checkpoint resumes after training on multiple nodes for fast initial
* convergence.
*/
public boolean _single_node_mode = false;
/**
* Enable shuffling of training data (on each node). This option is
* recommended if training data is replicated on N nodes, and the number of training samples per iteration
* is close to N times the dataset size, where all nodes train with (almost) all
* the data. It is automatically enabled if the number of training samples per iteration is set to -1 (or to N
* times the dataset size or larger).
*/
public boolean _shuffle_training_data = false;
public MissingValuesHandling _missing_values_handling = MissingValuesHandling.MeanImputation; // how to handle missing values: skip rows or impute the column mean
public boolean _sparse = false; // use sparse internal data handling (helps when the data contain many zeros)
public boolean _col_major = false; // use column-major weight matrices (only valid together with sparse data handling)
public double _average_activation = 0; // target average activation of hidden units (sparse autoencoder only)
public double _sparsity_beta = 0; // strength of the sparsity penalty (autoencoder only)
/**
* Max. number of categorical features, enforced via hashing (Experimental)
*/
public int _max_categorical_features = Integer.MAX_VALUE;
/**
* Force reproducibility on small data (will be slow - only uses 1 thread)
*/
public boolean _reproducible = false;
public boolean _export_weights_and_biases = false; // export weight matrices and bias vectors as frames (see makeWeightsBiases)
public enum MissingValuesHandling {
Skip, MeanImputation
}
public enum ClassSamplingMethod {
Uniform, Stratified
}
public enum InitialWeightDistribution {
UniformAdaptive, Uniform, Normal
}
/**
* Activation functions
*/
public enum Activation {
Tanh, TanhWithDropout, Rectifier, RectifierWithDropout, Maxout, MaxoutWithDropout
}
/**
* Loss functions
* Absolute, MeanSquare, Huber for regression
* Absolute, MeanSquare, Huber or CrossEntropy for classification
*/
public enum Loss {
Automatic, MeanSquare, CrossEntropy, Huber, Absolute
}
void validate( DeepLearning dl, boolean expensive ) {
dl.hide("_score_each_iteration", "Not used by Deep Learning.");
boolean classification = (expensive || dl._nclass != 0) ? dl.isClassifier() : (_loss == Loss.CrossEntropy);
if (_hidden == null || _hidden.length == 0) dl.error("_hidden", "There must be at least one hidden layer.");
for( int h : _hidden ) if( h<=0 ) dl.error("_hidden", "Hidden layer size must be positive.");
if (!_autoencoder) {
if (_valid == null)
dl.hide("_score_validation_samples", "score_validation_samples requires a validation frame.");
if (classification) {
dl.hide("_regression_stop", "regression_stop is used only with regression.");
} else {
dl.hide("_classification_stop", "classification_stop is used only with classification.");
// dl.hide("_max_hit_ratio_k", "max_hit_ratio_k is used only with classification.");
// dl.hide("_balance_classes", "balance_classes is used only with classification.");
}
// if( !classification || !_balance_classes )
// dl.hide("_class_sampling_factors", "class_sampling_factors requires both classification and balance_classes.");
if ((!classification && _valid != null) || _valid == null)
dl.hide("_score_validation_sampling", "score_validation_sampling requires classification and a validation frame.");
}
if (_activation != Activation.TanhWithDropout && _activation != Activation.MaxoutWithDropout && _activation != Activation.RectifierWithDropout)
dl.hide("_hidden_dropout_ratios", "hidden_dropout_ratios requires a dropout activation function.");
if (_hidden_dropout_ratios == null) {
// ok - nothing to check
}
else if (_hidden_dropout_ratios.length != _hidden.length) {
dl.error("_hidden_dropout_ratios", "Must have " + _hidden.length + " hidden layer dropout ratios.");
}
else if (_activation != Activation.TanhWithDropout && _activation != Activation.MaxoutWithDropout && _activation != Activation.RectifierWithDropout) {
if (!_quiet_mode) dl.hide("_hidden_dropout_ratios", "Ignoring hidden_dropout_ratios because a non-dropout activation function was specified.");
}
else if (ArrayUtils.maxValue(_hidden_dropout_ratios) >= 1 || ArrayUtils.minValue(_hidden_dropout_ratios) < 0) {
dl.error("_hidden_dropout_ratios", "Hidden dropout ratios must be >= 0 and <1.");
}
if (_input_dropout_ratio < 0 || _input_dropout_ratio >= 1)
dl.error("_input_dropout_ratio", "Input dropout must be >= 0 and <1.");
if (_score_duty_cycle < 0 || _score_duty_cycle > 1)
dl.error("_score_duty_cycle", "Score duty cycle must be >= 0 and <=1.");
if (_l1 < 0)
dl.error("_l1", "L1 penalty must be >= 0.");
if (_l2 < 0)
dl.error("_l2", "L2 penalty must be >= 0.");
if (H2O.CLOUD.size() == 1 && _replicate_training_data)
dl.hide("_replicate_training_data", "replicate_training_data is only valid with cloud size greater than 1.");
if (_single_node_mode && (H2O.CLOUD.size() == 1 || !_replicate_training_data))
dl.hide("_single_node_mode", "single_node_mode is only used with multi-node operation with replicated training data.");
if (_autoencoder)
dl.hide("_use_all_factor_levels", "use_all_factor_levels is mandatory in combination with autoencoder.");
if (getNumFolds() != 0)
dl.hide("_overwrite_with_best_model", "overwrite_with_best_model is unsupported in combination with n-fold cross-validation.");
if (_adaptive_rate) {
dl.hide("_rate", "rate is not used with adaptive_rate.");
dl.hide("_rate_annealing", "rate_annealing is not used with adaptive_rate.");
dl.hide("_rate_decay", "rate_decay is not used with adaptive_rate.");
dl.hide("_momentum_start", "momentum_start is not used with adaptive_rate.");
dl.hide("_momentum_ramp", "momentum_ramp is not used with adaptive_rate.");
dl.hide("_momentum_stable", "momentum_stable is not used with adaptive_rate.");
dl.hide("_nesterov_accelerated_gradient", "nesterov_accelerated_gradient is not used with adaptive_rate.");
} else {
// ! adaptive_rate
dl.hide("_rho", "rho is only used with adaptive_rate.");
dl.hide("_epsilon", "epsilon is only used with adaptive_rate.");
}
if (_initial_weight_distribution == InitialWeightDistribution.UniformAdaptive) {
dl.hide("_initial_weight_scale", "initial_weight_scale is not used if initial_weight_distribution == UniformAdaptive.");
}
if (getNumFolds() != 0)
dl.error("_n_folds", "n_folds is not yet implemented.");
if (_loss == null) {
if (expensive || dl._nclass != 0) {
dl.error("_loss", "Loss function must be specified. Try CrossEntropy for categorical response (classification), MeanSquare, Absolute or Huber for numerical response (regression).");
}
//otherwise, we might not know whether classification=true or false (from R, for example, the training data isn't known when init(false) is called).
} else if (_loss != Loss.Automatic) {
if (_autoencoder && _loss == Loss.CrossEntropy)
dl.error("_loss", "Cannot use CrossEntropy loss for auto-encoder.");
if (!classification && _loss == Loss.CrossEntropy)
dl.error("_loss", "For CrossEntropy loss, the response must be categorical.");
}
if (!classification && _loss == Loss.CrossEntropy)
dl.error("_loss", "For CrossEntropy loss, the response must be categorical. Either select MeanSquare, Absolute or Huber loss for regression, or use a categorical response.");
if (_score_training_samples < 0)
dl.error("_score_training_samples", "Number of training samples for scoring must be >= 0 (0 for all).");
if (_score_validation_samples < 0)
dl.error("_score_validation_samples", "Number of training samples for scoring must be >= 0 (0 for all).");
if(_autoencoder && _sparsity_beta > 0) {
if (_activation == Activation.Tanh || _activation == Activation.TanhWithDropout) {
if (_average_activation >= 1 || _average_activation <= -1)
dl.error("_average_activation", "Tanh average activation must be in (-1,1).");
}
else if (_activation == Activation.Rectifier || _activation == Activation.RectifierWithDropout) {
if (_average_activation <= 0)
dl.error("_average_activation", "Rectifier average activation must be positive.");
}
}
if (!_autoencoder && _sparsity_beta != 0) dl.info("_sparsity_beta", "Sparsity beta can only be used for autoencoder.");
// reason for the error message below is that validation might not have the same horizontalized features as the training data (or different order)
if (_autoencoder && _activation == Activation.Maxout) dl.error("_activation", "Maxout activation is not supported for auto-encoder.");
if (_max_categorical_features < 1) dl.error("_max_categorical_features", "max_categorical_features must be at least 1.");
if (!_sparse && _col_major) {
dl.error("_col_major", "Cannot use column major storage for non-sparse data handling.");
}
if (expensive) {
if (!classification && _balance_classes) {
dl.error("_balance_classes", "balance_classes requires classification.");
}
if (_class_sampling_factors != null && !_balance_classes) {
dl.error("_class_sampling_factors", "class_sampling_factors requires balance_classes to be enabled.");
}
if (_replicate_training_data && train().byteSize() > 1e10) {
dl.error("_replicate_training_data", "Compressed training dataset takes more than 10 GB, cannot run with replicate_training_data.");
}
}
}
}
public static class DeepLearningModelOutput extends SupervisedModel.SupervisedOutput {
@Override public int nfeatures() {
return _names.length - (autoencoder ? 0 : 1);
}
public DeepLearningModelOutput() { super(); }
public DeepLearningModelOutput(DeepLearning b) { super(b); }
boolean autoencoder;
DeepLearningScoring errors;
Key[] weights;
Key[] biases;
public TwoDimTable _variable_importances;
@Override public ModelCategory getModelCategory() {
return autoencoder ? ModelCategory.AutoEncoder : super.getModelCategory();
}
@Override public boolean isSupervised() { return !autoencoder; }
}
// Default publicly visible Schema is V3
public ModelSchema schema() { return new DeepLearningModelV3(); }
private volatile DeepLearningModelInfo model_info;
void set_model_info(DeepLearningModelInfo mi) { model_info = mi; }
final public DeepLearningModelInfo model_info() { return model_info; }
final public VarImp varImp() { return _output.errors.variable_importances; }
public long run_time; // cumulative training time so far
private long start_time; // wall-clock time when training started (ms, see constructor)
public long actual_train_samples_per_iteration; // effective train_samples_per_iteration after resolving special values / auto-tuning
public double time_for_communication_us; //helper for auto-tuning: time in microseconds for collective bcast/reduce of the model
public double epoch_counter; // number of epochs trained so far (can be fractional)
public long training_rows; // number of rows in the training frame
public long validation_rows; // number of rows in the validation frame
private DeepLearningScoring[] errors;
public DeepLearningScoring[] scoring_history() { return errors; }
// Keep the best model so far, based on a single criterion (overall class. error or MSE)
private float _bestError = Float.POSITIVE_INFINITY;
public Key actual_best_model_key;
// return the most up-to-date model metrics
DeepLearningScoring last_scored() { return errors == null ? null : errors[errors.length-1]; }
/**
* Get the parameters actually used for model building, not the user-given ones (_parms).
* They might differ since some defaults are filled in, and some invalid combinations are auto-disabled in modifyParms
* @return actually used parameters
*/
public final DeepLearningParameters get_params() { return model_info.get_params(); }
public float error() { return (float) (_output.isClassifier() ? cm().err() : mse()); }
@Override public ModelMetrics.MetricBuilder makeMetricBuilder(String[] domain) {
switch(_output.getModelCategory()) {
case Binomial: return new ModelMetricsBinomial.MetricBuilderBinomial(domain);
case Multinomial: return new ModelMetricsMultinomial.MetricBuilderMultinomial(_output.nclasses(),domain);
case Regression: return new ModelMetricsRegression.MetricBuilderRegression();
case AutoEncoder: return new ModelMetricsAutoEncoder.MetricBuilderAutoEncoder(_output.nfeatures());
default: throw H2O.unimpl("Invalid ModelCategory " + _output.getModelCategory());
}
}
public int compareTo(DeepLearningModel o) {
if (o._output.isClassifier() != _output.isClassifier()) throw new UnsupportedOperationException("Cannot compare classifier against regressor.");
if (o._output.nclasses() != _output.nclasses()) throw new UnsupportedOperationException("Cannot compare models with different number of classes.");
return (error() < o.error() ? -1 : error() > o.error() ? 1 : 0);
}
public static class DeepLearningScoring extends Iced {
// static final int API_WEAVER = 1;
// static public DocGen.FieldDoc[] DOC_FIELDS;
public double epoch_counter;
public long training_samples;
public long training_time_ms;
//training/validation sets
boolean validation;
int num_folds;
public long score_training_samples;
public long score_validation_samples;
public boolean classification;
VarImp variable_importances;
// classification
public ConfusionMatrix train_confusion_matrix;
public ConfusionMatrix valid_confusion_matrix;
public double train_err = Double.NaN;
public double valid_err = Double.NaN;
public double train_logloss = Double.NaN;
public double valid_logloss = Double.NaN;
public AUC2 training_AUC;
public AUC2 validation_AUC;
public float[] train_hitratio; // "Hit ratio on training data"
public float[] valid_hitratio; // "Hit ratio on validation data"
// regression
public double training_MSE = Double.NaN;
public double validation_MSE = Double.NaN;
public double training_R2 = Double.NaN;
public double validation_R2 = Double.NaN;
public long scoring_time;
DeepLearningScoring deep_clone() {
AutoBuffer ab = new AutoBuffer();
this.write(ab);
ab.flipForReading();
return (DeepLearningScoring) new DeepLearningScoring().read(ab);
}
@Override public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("Training MSE: " + training_MSE + "\n");
sb.append("Training R^2: " + training_R2 + "\n");
if (classification) {
sb.append("Training LogLoss: " + train_logloss + "\n");
sb.append("Training " + train_confusion_matrix.table().toString(1));
sb.append("Training Misclassification"
+ (training_AUC != null ? " [using threshold for " + AUC2.DEFAULT_CM.toString().replace("_", " ") + "]: " : ": ")
+ String.format("%.2f", 100 * train_err) + "%");
if (training_AUC != null) sb.append(", AUC: " + String.format("%.4f", 100 * training_AUC._auc) + "%");
}
if (validation || num_folds>0) {
if (num_folds > 0) {
sb.append("\nDoing " + num_folds + "-fold cross-validation:");
}
sb.append("\nValidation MSE: " + validation_MSE + "\n");
sb.append("Validation R^2: " + validation_R2 + "\n");
if (classification) {
sb.append("Validation LogLoss: " + valid_logloss + "\n");
sb.append("Validation " + valid_confusion_matrix.table().toString(1));
sb.append("Validation Misclassification"
+ (validation_AUC != null ? " [using threshold for " + AUC2.DEFAULT_CM.toString().replace("_", " ") + "]: " : ": ")
+ String.format("%.2f", (100 * valid_err)) + "%");
if (validation_AUC != null) sb.append(", AUC: " + String.format("%.4f", 100 * validation_AUC._auc) + "%");
}
}
sb.append("\n");
return sb.toString();
}
}
final private static class ConfMat extends ConfusionMatrix {
final private double _err;
final private double _f1;
public ConfMat(double err, double f1) {
super(null, null);
_err=err;
_f1=f1;
}
@Override public double err() { return _err; }
@Override public double F1() { return _f1; }
}
public ConfusionMatrix cm() {
final DeepLearningScoring lasterror = last_scored();
if (lasterror == null) return null;
ConfusionMatrix cm = lasterror.validation || lasterror.num_folds > 0 ?
lasterror.valid_confusion_matrix :
lasterror.train_confusion_matrix;
if (cm == null ) {
if (lasterror.validation || lasterror.num_folds > 0) {
return new ConfMat(lasterror.valid_err, lasterror.validation_AUC != null ? lasterror.validation_AUC.maxF1() : 0);
} else {
return new ConfMat(lasterror.train_err, lasterror.training_AUC != null ? lasterror.training_AUC.maxF1() : 0);
}
}
return cm;
}
public double mse() {
if (errors == null) return Double.NaN;
return last_scored().validation || last_scored().num_folds > 0 ? last_scored().validation_MSE : last_scored().training_MSE;
}
public double logloss() {
if (errors == null) return Double.NaN;
return last_scored().validation || last_scored().num_folds > 0 ? last_scored().valid_logloss : last_scored().train_logloss;
}
private TwoDimTable createScoringHistoryTable(DeepLearningScoring[] errors) {
List<String> colHeaders = new ArrayList<>();
List<String> colTypes = new ArrayList<>();
List<String> colFormat = new ArrayList<>();
colHeaders.add("Timestamp"); colTypes.add("string"); colFormat.add("%s");
colHeaders.add("Duration"); colTypes.add("string"); colFormat.add("%s");
colHeaders.add("Training Speed"); colTypes.add("string"); colFormat.add("%s");
colHeaders.add("Epochs"); colTypes.add("double"); colFormat.add("%.5f");
colHeaders.add("Samples"); colTypes.add("long"); colFormat.add("%d");
colHeaders.add("Training MSE"); colTypes.add("double"); colFormat.add("%.5f");
if (!_output.autoencoder) {
colHeaders.add("Training R^2");
colTypes.add("double");
colFormat.add("%.5f");
}
if (_output.isClassifier()) {
colHeaders.add("Training LogLoss");
colTypes.add("double");
colFormat.add("%.5f");
}
if (_output.getModelCategory() == ModelCategory.Binomial) {
colHeaders.add("Training AUC");
colTypes.add("double");
colFormat.add("%.5f");
}
if (_output.getModelCategory() == ModelCategory.Binomial || _output.getModelCategory() == ModelCategory.Multinomial) {
colHeaders.add("Training Classification Error");
colTypes.add("double");
colFormat.add("%.5f");
}
if (get_params()._valid != null) {
colHeaders.add("Validation MSE"); colTypes.add("double"); colFormat.add("%.5f");
if (!_output.autoencoder) {
colHeaders.add("Validation R^2");
colTypes.add("double");
colFormat.add("%.5f");
}
if (_output.isClassifier()) {
colHeaders.add("Validation LogLoss");
colTypes.add("double");
colFormat.add("%.5f");
}
if (_output.getModelCategory() == ModelCategory.Binomial) {
colHeaders.add("Validation AUC");
colTypes.add("double");
colFormat.add("%.5f");
}
if (_output.isClassifier()) {
colHeaders.add("Validation Classification Error");
colTypes.add("double");
colFormat.add("%.5f");
}
} else if (get_params().getNumFolds() > 0) {
colHeaders.add("Cross-Validation MSE"); colTypes.add("double"); colFormat.add("%.5f");
// colHeaders.add("Validation R^2"); colTypes.add("double"); colFormat.add("%g");
if (_output.getModelCategory() == ModelCategory.Binomial) {
colHeaders.add("Cross-Validation AUC");
colTypes.add("double");
colFormat.add("%.5f");
}
if (_output.isClassifier()) {
colHeaders.add("Cross-Validation Classification Error");
colTypes.add("double");
colFormat.add("%.5f");
}
}
final int rows = errors.length;
TwoDimTable table = new TwoDimTable(
"Scoring History", null,
new String[rows],
colHeaders.toArray(new String[0]),
colTypes.toArray(new String[0]),
colFormat.toArray(new String[0]),
"");
int row = 0;
for( int i = 0; i<errors.length ; i++ ) {
final DeepLearningScoring e = errors[i];
int col = 0;
assert(row < table.getRowDim());
assert(col < table.getColDim());
DateTimeFormatter fmt = DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss");
table.set(row, col++, fmt.print(start_time + e.training_time_ms));
table.set(row, col++, PrettyPrint.msecs(e.training_time_ms, true));
table.set(row, col++, e.training_time_ms == 0 ? null : (String.format("%.3f", e.training_samples/(e.training_time_ms/1e3)) + " rows/sec"));
table.set(row, col++, e.epoch_counter);
table.set(row, col++, e.training_samples);
table.set(row, col++, e.training_MSE);
if (!_output.autoencoder) {
table.set(row, col++, e.training_R2);
}
if (_output.isClassifier()) {
table.set(row, col++, e.train_logloss);
}
if (_output.getModelCategory() == ModelCategory.Binomial) {
table.set(row, col++, e.training_AUC != null ? e.training_AUC._auc : Double.NaN);
}
if (_output.isClassifier()) {
table.set(row, col++, e.train_err);
}
if (get_params()._valid != null) {
table.set(row, col++, e.validation_MSE);
if (!_output.autoencoder) {
table.set(row, col++, e.validation_R2);
}
if (_output.isClassifier()) {
table.set(row, col++, e.valid_logloss);
}
if (_output.getModelCategory() == ModelCategory.Binomial) {
table.set(row, col++, e.validation_AUC != null ? e.validation_AUC._auc : Double.NaN);
}
if (_output.isClassifier()) {
table.set(row, col++, e.valid_err);
}
}
else if(get_params().getNumFolds() > 1) {
throw H2O.unimpl("n_folds >= 2 is not (yet) implemented.");
}
row++;
}
return table;
}
// This describes the model, together with the parameters
// This will be shared: one per node
public static class DeepLearningModelInfo extends Iced {
public TwoDimTable summaryTable;
private DataInfo data_info;
public DataInfo data_info() { return data_info; }
// model is described by parameters and the following arrays
private Neurons.DenseRowMatrix[] dense_row_weights; //one 2D weight matrix per layer (stored as a 1D array each)
private Neurons.DenseColMatrix[] dense_col_weights; //one 2D weight matrix per layer (stored as a 1D array each)
private Neurons.DenseVector[] biases; //one 1D bias array per layer
private Neurons.DenseVector[] avg_activations; //one 1D array per hidden layer
// helpers for storing previous step deltas
// Note: These two arrays *could* be made transient and then initialized freshly in makeNeurons() and in DeepLearningTask.initLocal()
// But then, after each reduction, the weights would be lost and would have to restart afresh -> not *exactly* right, but close...
private Neurons.DenseRowMatrix[] dense_row_weights_momenta;
private Neurons.DenseColMatrix[] dense_col_weights_momenta;
private Neurons.DenseVector[] biases_momenta;
// helpers for AdaDelta
private Neurons.DenseRowMatrix[] dense_row_ada_dx_g;
private Neurons.DenseColMatrix[] dense_col_ada_dx_g;
private Neurons.DenseVector[] biases_ada_dx_g;
// compute model size (number of model parameters required for making predictions)
// momenta are not counted here, but they are needed for model building
public long size() {
long siz = 0;
for (Neurons.Matrix w : dense_row_weights) if (w != null) siz += w.size();
for (Neurons.Matrix w : dense_col_weights) if (w != null) siz += w.size();
for (Neurons.Vector b : biases) siz += b.size();
return siz;
}
// accessors to (shared) weights and biases - those will be updated racily (c.f. Hogwild!)
boolean has_momenta() { return get_params()._momentum_start != 0 || get_params()._momentum_stable != 0; }
boolean adaDelta() { return get_params()._adaptive_rate; }
public final Neurons.Matrix get_weights(int i) { return dense_row_weights[i] == null ? dense_col_weights[i] : dense_row_weights[i]; }
public final Neurons.DenseVector get_biases(int i) { return biases[i]; }
public final Neurons.Matrix get_weights_momenta(int i) { return dense_row_weights_momenta[i] == null ? dense_col_weights_momenta[i] : dense_row_weights_momenta[i]; }
public final Neurons.DenseVector get_biases_momenta(int i) { return biases_momenta[i]; }
public final Neurons.Matrix get_ada_dx_g(int i) { return dense_row_ada_dx_g[i] == null ? dense_col_ada_dx_g[i] : dense_row_ada_dx_g[i]; }
public final Neurons.DenseVector get_biases_ada_dx_g(int i) { return biases_ada_dx_g[i]; }
//accessor to shared parameter defining avg activations
public final Neurons.DenseVector get_avg_activations(int i) { return avg_activations[i]; }
private DeepLearningParameters parameters;
public final DeepLearningParameters get_params() { return parameters; }
private float[] mean_rate;
private float[] rms_rate;
private float[] mean_bias;
private float[] rms_bias;
private float[] mean_weight;
public float[] rms_weight;
public float[] mean_a;
private volatile boolean unstable = false;
public boolean unstable() { return unstable; }
public void set_unstable() { if (!unstable) computeStats(); unstable = true; }
private long processed_global;
public synchronized long get_processed_global() { return processed_global; }
public synchronized void set_processed_global(long p) { processed_global = p; }
public synchronized void add_processed_global(long p) { processed_global += p; }
private long processed_local;
public synchronized long get_processed_local() { return processed_local; }
public synchronized void set_processed_local(long p) { processed_local = p; }
public synchronized void add_processed_local(long p) { processed_local += p; }
public synchronized long get_processed_total() { return processed_global + processed_local; }
// package local helpers
int[] units; //number of neurons per layer, extracted from parameters and from datainfo
final boolean _classification; // Classification cache (nclasses>1)
final Frame _train; // Prepared training frame
final Frame _valid; // Prepared validation frame
public DeepLearningModelInfo() {
_classification = false;
_train = _valid = null;
}
public DeepLearningModelInfo(final DeepLearningParameters params, final DataInfo dinfo, boolean classification, Frame train, Frame valid) {
_classification = classification;
_train = train;
_valid = valid;
data_info = dinfo;
parameters = (DeepLearningParameters)params.clone();
modifyParms(parameters, parameters, _classification);
final int num_input = dinfo.fullN();
final int num_output = get_params()._autoencoder ? num_input : (_classification ? train.lastVec().cardinality() : 1);
assert(num_input > 0);
assert(num_output > 0);
if (has_momenta() && adaDelta()) throw new IllegalArgumentException("Cannot have non-zero momentum and adaptive rate at the same time.");
final int layers=get_params()._hidden.length;
// units (# neurons for each layer)
units = new int[layers+2];
if (get_params()._max_categorical_features <= Integer.MAX_VALUE - dinfo._nums)
units[0] = Math.min(dinfo._nums + get_params()._max_categorical_features, num_input);
else
units[0] = num_input;
System.arraycopy(get_params()._hidden, 0, units, 1, layers);
units[layers+1] = num_output;
boolean printLevels = units[0] > 1000L;
boolean warn = units[0] > 100000L;
if (printLevels) {
final String[][] domains = dinfo._adaptedFrame.domains();
int[] levels = new int[domains.length];
for (int i=0; i<levels.length; ++i) {
levels[i] = domains[i] != null ? domains[i].length : 0;
}
Arrays.sort(levels);
if (warn) {
Log.warn("===================================================================================================================================");
Log.warn(num_input + " input features" + (dinfo._cats > 0 ? " (after categorical one-hot encoding)" : "") + ". Can be slow and require a lot of memory.");
}
if (levels[levels.length-1] > 0) {
int levelcutoff = levels[levels.length-1-Math.min(10, levels.length-1)];
int count = 0;
for (int i=0; i<dinfo._adaptedFrame.numCols() - (get_params()._autoencoder ? 0 : 1) && count < 10; ++i) {
if (dinfo._adaptedFrame.domains()[i] != null && dinfo._adaptedFrame.domains()[i].length >= levelcutoff) {
if (warn) {
Log.warn("Categorical feature '" + dinfo._adaptedFrame._names[i] + "' has cardinality " + dinfo._adaptedFrame.domains()[i].length + ".");
} else {
Log.info("Categorical feature '" + dinfo._adaptedFrame._names[i] + "' has cardinality " + dinfo._adaptedFrame.domains()[i].length + ".");
}
}
count++;
}
}
if (warn) {
Log.warn("Suggestions:");
Log.warn(" *) Limit the size of the first hidden layer");
if (dinfo._cats > 0) {
Log.warn(" *) Limit the total number of one-hot encoded features with the parameter 'max_categorical_features'");
Log.warn(" *) Run h2o.interaction(...,pairwise=F) on high-cardinality categorical columns to limit the factor count, see http://learn.h2o.ai");
}
Log.warn("===================================================================================================================================");
}
}
// weights (to connect layers)
dense_row_weights = new Neurons.DenseRowMatrix[layers+1];
dense_col_weights = new Neurons.DenseColMatrix[layers+1];
// decide format of weight matrices row-major or col-major
if (get_params()._col_major) dense_col_weights[0] = new Neurons.DenseColMatrix(units[1], units[0]);
else dense_row_weights[0] = new Neurons.DenseRowMatrix(units[1], units[0]);
for (int i = 1; i <= layers; ++i)
dense_row_weights[i] = new Neurons.DenseRowMatrix(units[i + 1] /*rows*/, units[i] /*cols*/);
// biases (only for hidden layers and output layer)
biases = new Neurons.DenseVector[layers+1];
for (int i=0; i<=layers; ++i) biases[i] = new Neurons.DenseVector(units[i+1]);
// average activation (only for hidden layers)
if (get_params()._autoencoder && get_params()._sparsity_beta > 0) {
avg_activations = new Neurons.DenseVector[layers];
mean_a = new float[layers];
for (int i = 0; i < layers; ++i) avg_activations[i] = new Neurons.DenseVector(units[i + 1]);
}
fillHelpers();
// for diagnostics
mean_rate = new float[units.length];
rms_rate = new float[units.length];
mean_bias = new float[units.length];
rms_bias = new float[units.length];
mean_weight = new float[units.length];
rms_weight = new float[units.length];
}
// deep clone all weights/biases
DeepLearningModelInfo deep_clone() {
AutoBuffer ab = new AutoBuffer();
this.write(ab);
ab.flipForReading();
return (DeepLearningModelInfo) new DeepLearningModelInfo().read(ab);
}
void fillHelpers() {
if (has_momenta()) {
dense_row_weights_momenta = new Neurons.DenseRowMatrix[dense_row_weights.length];
dense_col_weights_momenta = new Neurons.DenseColMatrix[dense_col_weights.length];
if (dense_row_weights[0] != null)
dense_row_weights_momenta[0] = new Neurons.DenseRowMatrix(units[1], units[0]);
else
dense_col_weights_momenta[0] = new Neurons.DenseColMatrix(units[1], units[0]);
for (int i=1; i<dense_row_weights_momenta.length; ++i) dense_row_weights_momenta[i] = new Neurons.DenseRowMatrix(units[i+1], units[i]);
biases_momenta = new Neurons.DenseVector[biases.length];
for (int i=0; i<biases_momenta.length; ++i) biases_momenta[i] = new Neurons.DenseVector(units[i+1]);
}
else if (adaDelta()) {
dense_row_ada_dx_g = new Neurons.DenseRowMatrix[dense_row_weights.length];
dense_col_ada_dx_g = new Neurons.DenseColMatrix[dense_col_weights.length];
// ADADELTA accumulators (two running averages per coefficient, stored interleaved, hence the 2x width)
if (dense_row_weights[0] != null) {
dense_row_ada_dx_g[0] = new Neurons.DenseRowMatrix(units[1], 2*units[0]);
} else {
dense_col_ada_dx_g[0] = new Neurons.DenseColMatrix(2*units[1], units[0]);
}
for (int i=1; i<dense_row_ada_dx_g.length; ++i) {
dense_row_ada_dx_g[i] = new Neurons.DenseRowMatrix(units[i+1], 2*units[i]);
}
biases_ada_dx_g = new Neurons.DenseVector[biases.length];
for (int i=0; i<biases_ada_dx_g.length; ++i) {
biases_ada_dx_g[i] = new Neurons.DenseVector(2*units[i+1]);
}
}
}
public TwoDimTable createSummaryTable() {
Neurons[] neurons = DeepLearningTask.makeNeuronsForTesting(this);
long byte_size = new AutoBuffer().put(this).buf().length;
TwoDimTable table = new TwoDimTable(
"Status of Neuron Layers",
(!get_params()._autoencoder ? ("predicting " + _train.lastVecName() + ", ") : "") +
(get_params()._autoencoder ? "auto-encoder" :
_classification ? (units[units.length-1] + "-class classification") : "regression" )
+ ", " + get_params()._loss.toString() + " loss, "
+ String.format("%,d", size()) + " weights/biases, " + PrettyPrint.bytes(byte_size),
new String[neurons.length],
new String[]{"Layer", "Units", "Type", "Dropout", "L1", "L2",
"Mean Rate", "Rate RMS", "Momentum",
"Mean Weight", "Weight RMS",
"Mean Bias", "Bias RMS"
},
new String[]{"int", "int", "string", "double", "double", "double",
"double", "double", "double",
"double", "double",
"double", "double"
},
new String[]{"%d", "%d", "%s", "%2.2f %%", "%5f", "%5f", "%5f", "%5f", "%5f", "%5f", "%5f", "%5f", "%5f"},
"");
final String format = "%7g";
for (int i = 0; i < neurons.length; ++i) {
table.set(i, 0, i + 1);
table.set(i, 1, neurons[i].units);
table.set(i, 2, neurons[i].getClass().getSimpleName());
if (i == 0) {
table.set(i, 3, neurons[i].params._input_dropout_ratio*100);
continue;
} else if (i < neurons.length - 1) {
if (neurons[i].params._hidden_dropout_ratios == null) {
table.set(i, 3, 0);
} else {
table.set(i, 3, neurons[i].params._hidden_dropout_ratios[i - 1]*100);
}
}
table.set(i, 4, neurons[i].params._l1);
table.set(i, 5, neurons[i].params._l2);
table.set(i, 6, (get_params()._adaptive_rate ? mean_rate[i] : neurons[i].rate(get_processed_total())));
table.set(i, 7, (get_params()._adaptive_rate ? rms_rate[i] : 0));
table.set(i, 8, get_params()._adaptive_rate ? 0 : neurons[i].momentum(get_processed_total()));
table.set(i, 9, mean_weight[i]);
table.set(i, 10, rms_weight[i]);
table.set(i, 11, mean_bias[i]);
table.set(i, 12, rms_bias[i]);
}
summaryTable = table;
return summaryTable;
}
@Override public String toString() {
StringBuilder sb = new StringBuilder();
if (get_params()._diagnostics && !get_params()._quiet_mode) {
if (get_params()._sparsity_beta > 0) {
for (int k = 0; k < get_params()._hidden.length; k++) {
sb.append("Average activation in hidden layer ").append(k).append(" is ").append(mean_a[k]).append(" \n");
}
}
createSummaryTable();
sb.append(summaryTable.toString(1));
}
return sb.toString();
}
// DEBUGGING
public String toStringAll() {
StringBuilder sb = new StringBuilder();
sb.append(toString());
for (int i=0; i<units.length-1; ++i)
sb.append("\nweights[").append(i).append("][]=").append(Arrays.toString(get_weights(i).raw()));
for (int i=0; i<units.length-1; ++i) {
sb.append("\nbiases[").append(i).append("][]=").append(Arrays.toString(get_biases(i).raw()));
}
if (has_momenta()) {
for (int i=0; i<units.length-1; ++i)
sb.append("\nweights_momenta[").append(i).append("][]=").append(Arrays.toString(get_weights_momenta(i).raw()));
}
if (biases_momenta != null) {
for (int i=0; i<units.length-1; ++i) {
sb.append("\nbiases_momenta[").append(i).append("][]=").append(Arrays.toString(biases_momenta[i].raw()));
}
}
sb.append("\nunits[]=").append(Arrays.toString(units));
sb.append("\nprocessed global: ").append(get_processed_global());
sb.append("\nprocessed local: ").append(get_processed_local());
sb.append("\nprocessed total: ").append(get_processed_total());
sb.append("\n");
return sb.toString();
}
void initializeMembers() {
randomizeWeights();
//TODO: determine good/optimal/best initialization scheme for biases
// hidden layers
for (int i=0; i<get_params()._hidden.length; ++i) {
if (get_params()._activation == DeepLearningParameters.Activation.Rectifier
|| get_params()._activation == DeepLearningParameters.Activation.RectifierWithDropout
|| get_params()._activation == DeepLearningParameters.Activation.Maxout
|| get_params()._activation == DeepLearningParameters.Activation.MaxoutWithDropout
) {
// Arrays.fill(biases[i], 1.); //old behavior
Arrays.fill(biases[i].raw(), i == 0 ? 0.5f : 1f); //new behavior, might be slightly better
}
else if (get_params()._activation == DeepLearningParameters.Activation.Tanh || get_params()._activation == DeepLearningParameters.Activation.TanhWithDropout) {
Arrays.fill(biases[i].raw(), 0f);
}
}
Arrays.fill(biases[biases.length-1].raw(), 0f); //output layer
}
public void add(DeepLearningModelInfo other) {
for (int i=0;i<dense_row_weights.length;++i)
ArrayUtils.add(get_weights(i).raw(), other.get_weights(i).raw());
for (int i=0;i<biases.length;++i) ArrayUtils.add(biases[i].raw(), other.biases[i].raw());
if (avg_activations != null)
for (int i=0;i<avg_activations.length;++i)
ArrayUtils.add(avg_activations[i].raw(), other.avg_activations[i].raw());
if (has_momenta()) {
assert(other.has_momenta());
for (int i=0;i<dense_row_weights_momenta.length;++i)
ArrayUtils.add(get_weights_momenta(i).raw(), other.get_weights_momenta(i).raw());
for (int i=0;i<biases_momenta.length;++i)
ArrayUtils.add(biases_momenta[i].raw(), other.biases_momenta[i].raw());
}
if (adaDelta()) {
assert(other.adaDelta());
for (int i=0;i<dense_row_ada_dx_g.length;++i) {
ArrayUtils.add(get_ada_dx_g(i).raw(), other.get_ada_dx_g(i).raw());
}
}
add_processed_local(other.get_processed_local());
}
protected void div(float N) {
for (int i=0; i<dense_row_weights.length; ++i)
ArrayUtils.div(get_weights(i).raw(), N);
for (Neurons.Vector bias : biases) ArrayUtils.div(bias.raw(), N);
if (avg_activations != null)
for (Neurons.Vector avgac : avg_activations)
ArrayUtils.div(avgac.raw(), N);
if (has_momenta()) {
for (int i=0; i<dense_row_weights_momenta.length; ++i)
ArrayUtils.div(get_weights_momenta(i).raw(), N);
for (Neurons.Vector bias_momenta : biases_momenta) ArrayUtils.div(bias_momenta.raw(), N);
}
if (adaDelta()) {
for (int i=0;i<dense_row_ada_dx_g.length;++i) {
ArrayUtils.div(get_ada_dx_g(i).raw(), N);
}
}
}
double uniformDist(Random rand, double min, double max) {
return min + rand.nextFloat() * (max - min);
}
void randomizeWeights() {
for (int w=0; w<dense_row_weights.length; ++w) {
final Random rng = water.util.RandomUtils.getRNG(get_params()._seed + 0xBAD5EED + w+1); //to match NeuralNet behavior
final double range = Math.sqrt(6. / (units[w] + units[w+1]));
for( int i = 0; i < get_weights(w).rows(); i++ ) {
for( int j = 0; j < get_weights(w).cols(); j++ ) {
if (get_params()._initial_weight_distribution == DeepLearningParameters.InitialWeightDistribution.UniformAdaptive) {
if (w==dense_row_weights.length-1 && _classification)
get_weights(w).set(i,j, (float)(4.*uniformDist(rng, -range, range))); //Softmax might need an extra factor 4, since it's like a sigmoid
else
get_weights(w).set(i,j, (float)uniformDist(rng, -range, range));
}
else if (get_params()._initial_weight_distribution == DeepLearningParameters.InitialWeightDistribution.Uniform) {
get_weights(w).set(i,j, (float)uniformDist(rng, -get_params()._initial_weight_scale, get_params()._initial_weight_scale));
}
else if (get_params()._initial_weight_distribution == DeepLearningParameters.InitialWeightDistribution.Normal) {
get_weights(w).set(i,j, (float)(rng.nextGaussian() * get_params()._initial_weight_scale));
}
}
}
}
}
// TODO: Add "subset randomize" function
// int count = Math.min(15, _previous.units);
// double min = -.1f, max = +.1f;
// //double min = -1f, max = +1f;
// for( int o = 0; o < units; o++ ) {
// for( int n = 0; n < count; n++ ) {
// int i = rand.nextInt(_previous.units);
// int w = o * _previous.units + i;
// _w[w] = uniformDist(rand, min, max);
/**
* Compute Variable Importance, based on
* GEDEON: DATA MINING OF INPUTS: ANALYSING MAGNITUDE AND FUNCTIONAL MEASURES
* @return variable importances for input features
*/
public float[] computeVariableImportances() {
float[] vi = new float[units[0]];
Arrays.fill(vi, 0f);
float[][] Qik = new float[units[0]][units[2]]; //importance of input i on output k
float[] sum_wj = new float[units[1]]; //sum of incoming weights into first hidden layer
float[] sum_wk = new float[units[2]]; //sum of incoming weights into output layer (or second hidden layer)
for (float[] Qi : Qik) Arrays.fill(Qi, 0f);
Arrays.fill(sum_wj, 0f);
Arrays.fill(sum_wk, 0f);
// compute sum of absolute incoming weights
for( int j = 0; j < units[1]; j++ ) {
for( int i = 0; i < units[0]; i++ ) {
float wij = get_weights(0).get(j, i);
sum_wj[j] += Math.abs(wij);
}
}
for( int k = 0; k < units[2]; k++ ) {
for( int j = 0; j < units[1]; j++ ) {
float wjk = get_weights(1).get(k,j);
sum_wk[k] += Math.abs(wjk);
}
}
// compute importance of input i on output k as product of connecting weights going through j
for( int i = 0; i < units[0]; i++ ) {
for( int k = 0; k < units[2]; k++ ) {
for( int j = 0; j < units[1]; j++ ) {
float wij = get_weights(0).get(j,i);
float wjk = get_weights(1).get(k,j);
//Qik[i][k] += Math.abs(wij)/sum_wj[j] * wjk; //Wong,Gedeon,Taggart '95
Qik[i][k] += Math.abs(wij)/sum_wj[j] * Math.abs(wjk)/sum_wk[k]; //Gedeon '97
}
}
}
// normalize Qik over all outputs k
for( int k = 0; k < units[2]; k++ ) {
float sumQk = 0;
for( int i = 0; i < units[0]; i++ ) sumQk += Qik[i][k];
for( int i = 0; i < units[0]; i++ ) Qik[i][k] /= sumQk;
}
// importance for feature i is the sum over k of i->k importances
for( int i = 0; i < units[0]; i++ ) vi[i] = ArrayUtils.sum(Qik[i]);
//normalize importances such that max(vi) = 1
ArrayUtils.div(vi, ArrayUtils.maxValue(vi));
return vi;
}
// compute stats on all nodes
public void computeStats() {
float[][] rate = get_params()._adaptive_rate ? new float[units.length-1][] : null;
if (get_params()._autoencoder && get_params()._sparsity_beta > 0) {
for (int k = 0; k < get_params()._hidden.length; k++) {
mean_a[k] = 0;
for (int j = 0; j < avg_activations[k].size(); j++)
mean_a[k] += avg_activations[k].get(j);
mean_a[k] /= avg_activations[k].size();
}
}
for( int y = 1; y < units.length; y++ ) {
mean_rate[y] = rms_rate[y] = 0;
mean_bias[y] = rms_bias[y] = 0;
mean_weight[y] = rms_weight[y] = 0;
for(int u = 0; u < biases[y-1].size(); u++) {
mean_bias[y] += biases[y-1].get(u);
}
if (rate != null) rate[y-1] = new float[get_weights(y-1).raw().length];
for(int u = 0; u < get_weights(y-1).raw().length; u++) {
mean_weight[y] += get_weights(y-1).raw()[u];
if (rate != null) {
// final float RMS_dx = (float)Math.sqrt(ada[y-1][2*u]+(float)get_params().epsilon);
// final float invRMS_g = (float)(1/Math.sqrt(ada[y-1][2*u+1]+(float)get_params().epsilon));
final float RMS_dx = MathUtils.approxSqrt(get_ada_dx_g(y-1).raw()[2*u]+(float)get_params()._epsilon);
final float invRMS_g = MathUtils.approxInvSqrt(get_ada_dx_g(y-1).raw()[2*u+1]+(float)get_params()._epsilon);
rate[y-1][u] = RMS_dx*invRMS_g; //not exactly right, RMS_dx should be from the previous time step -> but close enough for diagnostics.
mean_rate[y] += rate[y-1][u];
}
}
mean_bias[y] /= biases[y-1].size();
mean_weight[y] /= get_weights(y-1).size();
if (rate != null) mean_rate[y] /= rate[y-1].length;
for(int u = 0; u < biases[y-1].size(); u++) {
final double db = biases[y-1].get(u) - mean_bias[y];
rms_bias[y] += db * db;
}
for(int u = 0; u < get_weights(y-1).size(); u++) {
final double dw = get_weights(y-1).raw()[u] - mean_weight[y];
rms_weight[y] += dw * dw;
if (rate != null) {
final double drate = rate[y-1][u] - mean_rate[y];
rms_rate[y] += drate * drate;
}
}
rms_bias[y] = MathUtils.approxSqrt(rms_bias[y]/biases[y-1].size());
rms_weight[y] = MathUtils.approxSqrt(rms_weight[y] / get_weights(y - 1).size());
if (rate != null) rms_rate[y] = MathUtils.approxSqrt(rms_rate[y]/rate[y-1].length);
// rms_bias[y] = (float)Math.sqrt(rms_bias[y]/biases[y-1].length);
// rms_weight[y] = (float)Math.sqrt(rms_weight[y]/weights[y-1].length);
// if (rate != null) rms_rate[y] = (float)Math.sqrt(rms_rate[y]/rate[y-1].length);
// Abort the run if weights or biases are unreasonably large (Note that all input values are normalized upfront)
// This can happen with Rectifier units when L1/L2/max_w2 are all set to 0, especially when using more than 1 hidden layer.
final double thresh = 1e10;
unstable |= mean_bias[y] > thresh || isNaN(mean_bias[y])
|| rms_bias[y] > thresh || isNaN(rms_bias[y])
|| mean_weight[y] > thresh || isNaN(mean_weight[y])
|| rms_weight[y] > thresh || isNaN(rms_weight[y]);
}
}
// unique identifier for this model's state
protected long checksum_impl() {
long cs = parameters._seed;
cs ^= size() * get_processed_total();
cs ^= (long)(2234.3424*ArrayUtils.sum(mean_bias));
cs *= (long)(9234.1343*ArrayUtils.sum(rms_bias));
cs ^= (long)(9723.9734*ArrayUtils.sum(mean_weight));
cs *= (long)(9234.1783*ArrayUtils.sum(rms_weight));
cs ^= (long)(4273.2344*ArrayUtils.sum(mean_rate));
cs *= (long)(3378.1999*ArrayUtils.sum(rms_rate));
return cs;
}
}
/**
* Helper to allocate keys for output frames for weights and biases
* @param destKey destination key of the model, used to derive the keys of the weight/bias frames
*/
private void makeWeightsBiases(Key destKey) {
if (!model_info.get_params()._export_weights_and_biases) {
_output.weights = null;
_output.biases = null;
} else {
_output.weights = new Key[model_info.get_params()._hidden.length + 1];
for (int i = 0; i < _output.weights.length; ++i) {
_output.weights[i] = Key.makeUserHidden(Key.make(destKey + ".weights." + i));
}
_output.biases = new Key[model_info.get_params()._hidden.length + 1];
for (int i = 0; i < _output.biases.length; ++i) {
_output.biases[i] = Key.makeUserHidden(Key.make(destKey + ".biases." + i));
}
}
}
/** Constructor to restart from a checkpointed model
* @param destKey New destination key for the model
* @param parms User-given parameters for checkpoint restart
* @param cp Checkpoint to restart from
* @param store_best_model Store only the best model instead of the latest one
* @param dataInfo Prepared DataInfo to attach to the restored model */
public DeepLearningModel(final Key destKey, final DeepLearningParameters parms, final DeepLearningModel cp, final boolean store_best_model, final DataInfo dataInfo) {
super(destKey, parms == null ? (DeepLearningParameters)cp._parms.clone() : parms, (DeepLearningModelOutput)cp._output.clone());
assert(_parms != cp._parms); //make sure we have a clone
model_info = cp.model_info.deep_clone(); //don't want to interfere with model being built, just make a deep copy and store that
if (store_best_model) {
model_info.data_info = dataInfo.deep_clone(); //replace previous data_info with updated version that's passed in (contains enum for classification)
} else {
model_info.data_info = dataInfo; //shallow clone is ok
if (parms != null) {
assert (_parms == parms);
assert (_parms._checkpoint == parms._checkpoint);
assert (_parms._checkpoint == cp._key);
}
// _parms._checkpoint = cp._key; //it's only a "real" checkpoint if job != null, otherwise a best model copy
}
assert(model_info().get_params() != cp.model_info().get_params()); //make sure we have a clone
actual_best_model_key = cp.actual_best_model_key;
start_time = cp.start_time;
run_time = cp.run_time;
training_rows = cp.training_rows; //copy the value to display the right number on the model page before training has started
validation_rows = cp.validation_rows; //copy the value to display the right number on the model page before training has started
_bestError = cp._bestError;
// deep clone scoring history
errors = cp.errors.clone();
for (int i=0; i<errors.length;++i)
errors[i] = cp.errors[i].deep_clone();
_output.errors = last_scored();
makeWeightsBiases(destKey);
_output._scoring_history = createScoringHistoryTable(errors);
_output._variable_importances = calcVarImp(last_scored().variable_importances);
_output._names = dataInfo._adaptedFrame.names();
_output._domains = dataInfo._adaptedFrame.domains();
// set proper timing
_timeLastScoreEnter = System.currentTimeMillis();
_timeLastScoreStart = 0;
_timeLastScoreEnd = 0;
_timeLastPrintStart = 0;
assert(Arrays.equals(_key._kb, destKey._kb));
}
public DeepLearningModel(final Key destKey, final DeepLearningParameters parms, final DeepLearningModelOutput output, Frame train, Frame valid) {
super(destKey, parms, output);
boolean classification = train.lastVec().isEnum();
final DataInfo dinfo = makeDataInfo(train, valid, _parms);
_output._names = train._names ; // Since changed by DataInfo, need to be reflected in the Model output as well
_output._domains= train.domains();
_output._names = dinfo._adaptedFrame.names();
_output._domains = dinfo._adaptedFrame.domains();
DKV.put(dinfo._key,dinfo);
model_info = new DeepLearningModelInfo(parms, dinfo, classification, train, valid);
actual_best_model_key = Key.makeUserHidden(Key.make());
if (parms.getNumFolds() != 0) actual_best_model_key = null;
if (!parms._autoencoder) {
errors = new DeepLearningScoring[1];
errors[0] = new DeepLearningScoring();
errors[0].validation = (parms._valid != null);
errors[0].num_folds = parms.getNumFolds();
_output.errors = last_scored();
_output._scoring_history = createScoringHistoryTable(errors);
_output._variable_importances = calcVarImp(last_scored().variable_importances);
}
makeWeightsBiases(destKey);
run_time = 0;
start_time = System.currentTimeMillis();
_timeLastScoreEnter = start_time;
assert _key.equals(destKey);
boolean fail = false;
long byte_size = 0;
try {
byte_size = new AutoBuffer().put(this).buf().length;
} catch(Throwable t) {
fail = true;
}
if (byte_size > Value.MAX || fail)
throw new IllegalArgumentException("Model is too large: PUBDEV-941");
}
/**
* Take user-given parameters and turn them into usable, fully populated parameters (e.g., to be used by Neurons during training)
* @param fromParms raw user-given parameters from the REST API
* @param toParms modified set of parameters, with defaults filled in
* @param classification whether the response is categorical (used to pick the default loss function)
*/
public static void modifyParms(DeepLearningParameters fromParms, DeepLearningParameters toParms, boolean classification) {
if (fromParms._hidden_dropout_ratios == null) {
if (fromParms._activation == DeepLearningParameters.Activation.TanhWithDropout
|| fromParms._activation == DeepLearningParameters.Activation.MaxoutWithDropout
|| fromParms._activation == DeepLearningParameters.Activation.RectifierWithDropout) {
toParms._hidden_dropout_ratios = new double[fromParms._hidden.length];
if (!fromParms._quiet_mode)
Log.info("_hidden_dropout_ratios: Automatically setting all hidden dropout ratios to 0.5.");
Arrays.fill(toParms._hidden_dropout_ratios, 0.5);
}
} else {
toParms._hidden_dropout_ratios = fromParms._hidden_dropout_ratios.clone();
}
if (H2O.CLOUD.size() == 1 && fromParms._replicate_training_data) {
Log.info("_replicate_training_data: Disabling replicate_training_data on 1 node.");
toParms._replicate_training_data = false;
}
if (fromParms._single_node_mode && (H2O.CLOUD.size() == 1 || !fromParms._replicate_training_data)) {
Log.info("_single_node_mode: Disabling single_node_mode (only for multi-node operation with replicated training data).");
toParms._single_node_mode = false;
}
if (!fromParms._use_all_factor_levels && fromParms._autoencoder ) {
Log.info("_use_all_factor_levels: Automatically enabling all_factor_levels for auto-encoders.");
toParms._use_all_factor_levels = true;
}
if(fromParms._overwrite_with_best_model && fromParms.getNumFolds() != 0) {
Log.info("_overwrite_with_best_model: Disabling overwrite_with_best_model in combination with n-fold cross-validation.");
toParms._overwrite_with_best_model = false;
}
if (fromParms._adaptive_rate) {
Log.info("_adaptive_rate: Using automatic learning rate. Ignoring the following input parameters: "
+ "rate, rate_decay, rate_annealing, momentum_start, momentum_ramp, momentum_stable, nesterov_accelerated_gradient.");
toParms._rate = 0;
toParms._rate_decay = 0;
toParms._rate_annealing = 0;
toParms._momentum_start = 0;
toParms._momentum_ramp = 0;
toParms._momentum_stable = 0;
toParms._nesterov_accelerated_gradient = false;
} else {
Log.info("_adaptive_rate: Using manual learning rate. Ignoring the following input parameters: "
+ "rho, epsilon.");
toParms._rho = 0;
toParms._epsilon = 0;
}
if (fromParms.getNumFolds() != 0) {
if (fromParms._overwrite_with_best_model) {
Log.info("_overwrite_with_best_model: Automatically disabling overwrite_with_best_model, since the final model is the only scored model with n-fold cross-validation.");
toParms._overwrite_with_best_model = false;
}
}
if (fromParms._loss == DeepLearningParameters.Loss.Automatic) {
toParms._loss = (classification && !fromParms._autoencoder) ? DeepLearningParameters.Loss.CrossEntropy : DeepLearningParameters.Loss.MeanSquare;
Log.info("_loss: Automatically setting loss function to " + toParms._loss);
}
if (fromParms._reproducible) {
Log.info("_reproducibility: Automatically enabling force_load_balancing, disabling single_node_mode and replicate_training_data\n"
+"and setting train_samples_per_iteration to -1 to enforce reproducibility.");
toParms._force_load_balance = true;
toParms._single_node_mode = false;
toParms._train_samples_per_iteration = -1;
toParms._replicate_training_data = false; //there's no benefit from having multiple nodes compute the exact same thing, and then average it back to the same
// replicate_training_data = true; //doesn't hurt, but does replicated identical work
}
}
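  // Hedged usage sketch (not part of the original source): modifyParms is meant to leave the raw
  // user parameters untouched and fill defaults into a separate copy, for example like this.
  private static DeepLearningParameters exampleFillDefaults(DeepLearningParameters userParms, boolean classification) {
    DeepLearningParameters filled = (DeepLearningParameters) userParms.clone(); // keep the user's values intact
    modifyParms(userParms, filled, classification);                             // populate defaults in the copy
    return filled;
  }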
public long _timeLastScoreEnter; //not transient: needed for HTML display page
transient private long _timeLastScoreStart;
transient private long _timeLastScoreEnd;
transient private long _timeLastPrintStart;
/**
*
* @param ftrain potentially downsampled training data for scoring
* @param ftest potentially downsampled validation data for scoring
* @param job_key key of the owning job
* @param progressKey key of the progress
* @return true if model building is ongoing
*/
boolean doScoring(Frame ftrain, Frame ftest, Key job_key, Key progressKey) {
boolean keep_running;
try {
final long now = System.currentTimeMillis();
epoch_counter = (float)model_info().get_processed_total()/training_rows;
final double time_last_iter_millis = Math.max(5,now-_timeLastScoreEnter);
// Auto-tuning
// if multi-node and auto-tuning and at least 10 ms for communication (to avoid doing this on multi-JVM on the same node),
// then adjust the auto-tuning parameter 'actual_train_samples_per_iteration' such that the targeted ratio of comm to comp is achieved
// Note: actual communication time is estimated by the NetworkTest's collective test.
if (H2O.CLOUD.size() > 1 && get_params()._train_samples_per_iteration == -2 && time_for_communication_us > 1e4) {
// Log.info("Time taken for communication: " + PrettyPrint.usecs((long)time_for_communication_us));
// Log.info("Time taken for Map/Reduce iteration: " + PrettyPrint.msecs((long)time_last_iter_millis, true));
final double comm_to_work_ratio = (time_for_communication_us *1e-3) / time_last_iter_millis;
// Log.info("Ratio of network communication to computation: " + String.format("%.3f", comm_to_work_ratio));
// Log.info("target_comm_to_work: " + get_params().target_ratio_comm_to_comp);
final double correction = get_params()._target_ratio_comm_to_comp / comm_to_work_ratio;
// Log.warn("Suggested value for train_samples_per_iteration: " + get_params().actual_train_samples_per_iteration/correction);
actual_train_samples_per_iteration /= correction;
actual_train_samples_per_iteration = Math.max(1, actual_train_samples_per_iteration);
}
run_time += time_last_iter_millis;
_timeLastScoreEnter = now;
keep_running = (epoch_counter < model_info().get_params()._epochs);
final long sinceLastScore = now -_timeLastScoreStart;
final long sinceLastPrint = now -_timeLastPrintStart;
if (!keep_running || sinceLastPrint > get_params()._score_interval * 1000) { //print this after every score_interval, not considering duty cycle
_timeLastPrintStart = now;
if (!get_params()._quiet_mode) {
Log.info("Training time: " + PrettyPrint.msecs(run_time, true)
+ ". Processed " + String.format("%,d", model_info().get_processed_total()) + " samples" + " (" + String.format("%.3f", epoch_counter) + " epochs)."
+ " Speed: " + String.format("%.3f", 1000. * model_info().get_processed_total() / run_time) + " samples/sec.\n");
}
}
// this is potentially slow - only do every so often
if( !keep_running ||
(sinceLastScore > get_params()._score_interval *1000 //don't score too often
&&(double)(_timeLastScoreEnd-_timeLastScoreStart)/sinceLastScore < get_params()._score_duty_cycle) ) { //duty cycle
if (progressKey != null) {
new Job.ProgressUpdate("Scoring on " + ftrain.numRows() + " training samples" +
(ftest != null ? (", " + ftest.numRows() + " validation samples)") : ")")
).fork(progressKey);
}
final boolean printme = !get_params()._quiet_mode;
_timeLastScoreStart = now;
if (get_params()._diagnostics) model_info().computeStats();
DeepLearningScoring err = new DeepLearningScoring();
err.training_time_ms = run_time;
err.epoch_counter = epoch_counter;
err.training_samples = model_info().get_processed_total();
err.validation = ftest != null;
err.score_training_samples = ftrain.numRows();
err.classification = _output.isClassifier();
if (get_params()._autoencoder) {
if (printme) Log.info("Scoring the auto-encoder.");
// training
{
final Frame mse_frame = scoreAutoEncoder(ftrain, Key.make());
final Vec l2 = mse_frame.anyVec();
Log.info("Mean reconstruction error on training data: " + l2.mean() + "\n");
err.training_MSE = l2.mean();
mse_frame.delete();
hex.ModelMetricsAutoEncoder mm1 = (ModelMetricsAutoEncoder)ModelMetrics.getFromDKV(this,ftrain);
err.training_MSE = err.train_err = mm1._MSE;
_output._training_metrics = mm1;
}
if (ftest != null) {
final Frame mse_frame = scoreAutoEncoder(ftest, Key.make());
final Vec l2 = mse_frame.anyVec();
Log.info("Mean reconstruction error on validation data: " + l2.mean() + "\n");
err.validation_MSE = l2.mean();
mse_frame.delete();
hex.ModelMetricsAutoEncoder mm1 = (ModelMetricsAutoEncoder)ModelMetrics.getFromDKV(this,ftest);
err.validation_MSE = err.valid_err = mm1._MSE;
_output._validation_metrics = mm1;
}
} else {
if (printme) Log.info("Scoring the model.");
// compute errors
final String m = model_info().toString();
if (m.length() > 0) Log.info(m);
final Frame trainPredict = score(ftrain);
trainPredict.delete();
hex.ModelMetricsSupervised mm1 = (ModelMetricsSupervised)ModelMetrics.getFromDKV(this,ftrain);
if (mm1 instanceof ModelMetricsBinomial) {
ModelMetricsBinomial mm = (ModelMetricsBinomial)(mm1);
err.training_AUC = mm._auc;
err.train_confusion_matrix = mm.cm();
err.train_err = err.train_confusion_matrix.err();
err.train_logloss = mm._logloss;
}
else if (mm1 instanceof ModelMetricsMultinomial) {
ModelMetricsMultinomial mm = (ModelMetricsMultinomial)(mm1);
err.train_confusion_matrix = mm.cm();
err.train_err = err.train_confusion_matrix.err();
err.train_logloss = mm._logloss;
err.train_hitratio = mm._hit_ratios;
}
err.training_MSE = mm1._MSE;
err.training_R2 = mm1.r2();
_output._training_metrics = mm1;
if (get_params()._score_training_samples != 0 && get_params()._score_training_samples < ftrain.numRows()) {
_output._training_metrics._description = "Metrics reported on " + ftrain.numRows() + " training set samples";
}
if (ftest != null) {
Frame validPred = score(ftest);
validPred.delete();
hex.ModelMetricsSupervised mm2 = (ModelMetricsSupervised)hex.ModelMetrics.getFromDKV(this, ftest);
if (mm2 != null) {
if (mm2 instanceof ModelMetricsBinomial) {
ModelMetricsBinomial mm = (ModelMetricsBinomial) (mm2);
err.validation_AUC = mm._auc;
err.valid_confusion_matrix = mm.cm();
err.valid_logloss = mm._logloss;
err.valid_err = err.valid_confusion_matrix.err();
} else if (mm2 instanceof ModelMetricsMultinomial) {
ModelMetricsMultinomial mm = (ModelMetricsMultinomial) (mm2);
err.valid_confusion_matrix = mm.cm();
err.valid_err = err.valid_confusion_matrix.err();
err.valid_logloss = mm._logloss;
err.valid_hitratio = mm._hit_ratios;
}
err.validation_MSE = mm2._MSE;
err.validation_R2 = mm2.r2();
_output._validation_metrics = mm2;
if (get_params()._score_validation_samples != 0 && get_params()._score_validation_samples != ftest.numRows()) {
_output._validation_metrics._description = "Metrics reported on " + ftest.numRows() + " validation set samples";
if (get_params()._score_validation_sampling == DeepLearningParameters.ClassSamplingMethod.Stratified) {
_output._validation_metrics._description += " (stratified sampling)";
}
}
}
}
}
if (get_params()._variable_importances) {
if (!get_params()._quiet_mode) Log.info("Computing variable importances.");
final float[] vi = model_info().computeVariableImportances();
err.variable_importances = new VarImp(vi, Arrays.copyOfRange(model_info().data_info().coefNames(), 0, vi.length));
}
_timeLastScoreEnd = System.currentTimeMillis();
err.scoring_time = System.currentTimeMillis() - now;
// enlarge the error array by one, push latest score back
if (errors == null) {
errors = new DeepLearningScoring[]{err};
} else {
DeepLearningScoring[] err2 = new DeepLearningScoring[errors.length + 1];
System.arraycopy(errors, 0, err2, 0, errors.length);
err2[err2.length - 1] = err;
errors = err2;
}
_output.errors = last_scored();
water.util.Timer t = new Timer();
// store weights and matrices to Frames
if (_output.weights != null && _output.biases != null) {
for (int i = 0; i < _output.weights.length; ++i) {
model_info.get_weights(i).toFrame(_output.weights[i]);
}
for (int i = 0; i < _output.biases.length; ++i) {
model_info.get_biases(i).toFrame(_output.biases[i]);
}
Log.info("Writing weights and biases to Frames took " + t.time()/1000. + " seconds.");
}
_output._scoring_history = createScoringHistoryTable(errors);
_output._variable_importances = calcVarImp(last_scored().variable_importances);
_output._model_summary = model_info.createSummaryTable();
if (!get_params()._autoencoder) {
// always keep a copy of the best model so far (based on the following criterion)
if (actual_best_model_key != null && get_params()._overwrite_with_best_model && (
// if we have a best_model in DKV, then compare against its error() (unless it's a different model as judged by the network size)
(DKV.get(actual_best_model_key) != null && (error() < DKV.get(actual_best_model_key).<DeepLearningModel>get().error() || !Arrays.equals(model_info().units, DKV.get(actual_best_model_key).<DeepLearningModel>get().model_info().units)))
||
// otherwise, compare against our own _bestError
(DKV.get(actual_best_model_key) == null && error() < _bestError)
) ) {
if (!get_params()._quiet_mode)
Log.info("Error reduced from " + _bestError + " to " + error() + ".");
_bestError = error();
putMeAsBestModel(actual_best_model_key);
// debugging check
//if (false) {
// DeepLearningModel bestModel = DKV.get(actual_best_model_key).get();
// final Frame fr = ftest != null ? ftest : ftrain;
// final Frame bestPredict = bestModel.score(fr);
// final Frame hitRatio_bestPredict = new Frame(bestPredict);
// final double err3 = calcError(fr, fr.lastVec(), bestPredict, hitRatio_bestPredict, "cross-check",
// printme, get_params()._max_confusion_matrix_size, new hex.ConfusionMatrix2(), _output.isClassifier() && _output.nclasses() == 2 ? new AUC(null,null) : null, null);
// if (_output.isClassifier())
// assert (ftest != null ? Math.abs(err.valid_err - err3) < 1e-5 : Math.abs(err.train_err - err3) < 1e-5);
// else
// assert (ftest != null ? Math.abs(err.validation_MSE - err3) < 1e-5 : Math.abs(err.training_MSE - err3) < 1e-5);
//  bestPredict.delete();
//}
}
// else {
// // keep output JSON small
// if (errors.length > 1) {
// if (last_scored().training_AUC != null) last_scored().training_AUC.clear();
// if (last_scored().validation_AUC != null) last_scored().validation_AUC.clear();
// last_scored()._variable_importances = null;
// }
// }
// print the freshly scored model to ASCII
if (keep_running)
for (String s : toString().split("\n")) Log.info(s);
if (printme) Log.info("Time taken for scoring and diagnostics: " + PrettyPrint.msecs(err.scoring_time, true));
}
}
if (model_info().unstable()) {
Log.warn(unstable_msg);
keep_running = false;
} else if ( (_output.isClassifier() && last_scored().train_err <= get_params()._classification_stop)
|| (!_output.isClassifier() && last_scored().training_MSE <= get_params()._regression_stop) ) {
Log.info("Achieved requested predictive accuracy on the training data. Model building completed.");
keep_running = false;
}
update(job_key);
}
catch (Exception ex) {
//ex.printStackTrace();
throw new RuntimeException(ex);
// return false;
}
return keep_running;
}
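  // Minimal sketch (not used by the original code) of the duty-cycle gate inside doScoring():
  // score only if enough wall-clock time has passed since the last scoring run AND the previous
  // scoring run consumed less than score_duty_cycle of that interval.
  static boolean exampleShouldScore(long sinceLastScoreMs, long lastScoreDurationMs,
                                    double scoreIntervalSecs, double scoreDutyCycle) {
    return sinceLastScoreMs > scoreIntervalSecs * 1000
        && (double) lastScoreDurationMs / sinceLastScoreMs < scoreDutyCycle;
  }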
@Override public String toString() {
return _output.toString();
}
/** Make either a prediction or a reconstruction.
* @param orig Test dataset
* @param adaptedFr Test dataset, adapted to the model
* @return A frame containing the prediction or reconstruction
*/
@Override protected Frame scoreImpl(Frame orig, Frame adaptedFr, String destination_key) {
if (!get_params()._autoencoder) {
return super.scoreImpl(orig,adaptedFr,destination_key);
} else {
// Reconstruction
final int len = model_info().data_info().fullN();
String prefix = "reconstr_";
assert(model_info().data_info()._responses == 0);
String[] coefnames = model_info().data_info().coefNames();
assert(len == coefnames.length);
Frame adaptFrm = new Frame(adaptedFr);
for( int c=0; c<len; c++ )
adaptFrm.add(prefix+coefnames[c],adaptFrm.anyVec().makeZero());
new MRTask() {
@Override public void map( Chunk chks[] ) {
double tmp [] = new double[_output._names.length];
float preds[] = new float [len];
final Neurons[] neurons = DeepLearningTask.makeNeuronsForTesting(model_info);
for( int row=0; row<chks[0]._len; row++ ) {
float p[] = score_autoencoder(chks, row, tmp, preds, neurons);
for( int c=0; c<preds.length; c++ )
chks[_output._names.length+c].set(row,p[c]);
}
}
}.doAll(adaptFrm);
// Return the predicted columns
int x=_output._names.length, y=adaptFrm.numCols();
Frame f = adaptFrm.extractFrame(x, y); //this will call vec_impl() and we cannot call the delete() below just yet
f = new Frame((null == destination_key ? Key.make() : Key.make(destination_key)), f.names(), f.vecs());
DKV.put(f);
makeMetricBuilder(null).makeModelMetrics(this, orig, Double.NaN);
return f;
}
}
/**
* Predict from raw double values representing the data
* @param data raw array containing categorical values (horizontalized to 1,0,0,1,0,0 etc.) and numerical values (0.35,1.24,5.3234,etc), both can contain NaNs
* @param preds predicted label and per-class probabilities (for classification), predicted target (regression), can contain NaNs
* @return preds, can contain NaNs
*/
@Override public double[] score0(double[] data, double[] preds) {
if (model_info().unstable()) {
Log.warn(unstable_msg);
throw new UnsupportedOperationException("Trying to predict with an unstable model.");
}
Neurons[] neurons = DeepLearningTask.makeNeuronsForTesting(model_info);
((Neurons.Input)neurons[0]).setInput(-1, data);
DeepLearningTask.step(-1, neurons, model_info, false, null);
float[] out = neurons[neurons.length - 1]._a.raw();
if (_output.isClassifier()) {
assert (preds.length == out.length + 1);
for (int i = 0; i < preds.length - 1; ++i) {
preds[i + 1] = out[i];
if (Double.isNaN(preds[i + 1])) throw new RuntimeException("Predicted class probability NaN!");
}
// label assignment happens later - explicitly mark it as invalid here
preds[0] = -1;
} else {
if (model_info().data_info()._normRespMul != null)
preds[0] = ((double)out[0] / model_info().data_info()._normRespMul[0] + model_info().data_info()._normRespSub[0]);
else
preds[0] = (double)out[0];
if (Double.isNaN(preds[0])) throw new RuntimeException("Predicted regression target NaN!");
}
return preds;
}
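  // Illustrative helper (not part of the original model): for a classifier, score0 fills
  // preds[1..nclasses] with per-class probabilities and leaves preds[0] for the label, which is
  // assigned later. This shows how a caller could pick the most probable class from that layout.
  private static int exampleMostLikelyClass(double[] preds) {
    int best = 0;
    for (int c = 1; c < preds.length - 1; ++c)
      if (preds[c + 1] > preds[best + 1]) best = c;
    return best;
  }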
/**
* Score auto-encoded reconstruction (on-the-fly, without allocating the reconstruction as done in Frame score(Frame fr))
* @param frame Original data (can contain response, will be ignored)
* @return Frame containing one Vec with reconstruction error (MSE) of each reconstructed row, caller is responsible for deletion
*/
public Frame scoreAutoEncoder(Frame frame, Key destination_key) {
if (!get_params()._autoencoder)
throw new H2OIllegalArgumentException("Only for AutoEncoder Deep Learning model.", "");
final int len = _output._names.length;
Frame adaptFrm = new Frame(frame);
Vec v0 = adaptFrm.anyVec().makeZero();
Scope.enter();
adaptTestForTrain(adaptFrm,true);
adaptFrm.add("Reconstruction.MSE", v0);
new MRTask() {
@Override public void map( Chunk chks[] ) {
double tmp [] = new double[len];
final Neurons[] neurons = DeepLearningTask.makeNeuronsForTesting(model_info);
for( int row=0; row<chks[0]._len; row++ ) {
for( int i=0; i<len; i++ )
tmp[i] = chks[i].atd(row);
//store the per-row reconstruction error (MSE) in the last column
chks[len].set(row, score_autoencoder(tmp, null, neurons));
}
}
}.doAll(adaptFrm);
Scope.exit();
Frame res = adaptFrm.extractFrame(len, adaptFrm.numCols());
res = new Frame(destination_key, res.names(), res.vecs());
DKV.put(res);
makeMetricBuilder(null).makeModelMetrics(this, frame, res.vecs()[0].mean());
return res;
}
@Override public Frame score(Frame fr, String destination_key) {
if (!_parms._autoencoder)
return super.score(fr, destination_key);
else {
Frame adaptFr = new Frame(fr);
adaptTestForTrain(adaptFr, true); // Adapt
Frame output = scoreImpl(fr, adaptFr, destination_key); // Score
Vec[] vecs = adaptFr.vecs();
for (int i = 0; i < vecs.length; i++)
if (fr.find(vecs[i]) != -1) // Exists in the original frame?
vecs[i] = null; // Do not delete it
adaptFr.delete();
return output;
}
}
/**
* Compute the deep features of a given hidden layer on-the-fly and materialize them into a Frame
* @param frame Original data (can contain response, will be ignored)
* @param layer index of the hidden layer for which to extract the features
* @return Frame containing the deep features (#cols = hidden[layer])
*/
public Frame scoreDeepFeatures(Frame frame, final int layer) {
if (layer < 0 || layer >= model_info().get_params()._hidden.length)
throw new H2OIllegalArgumentException("hidden layer (index) to extract must be between " + 0 + " and " + (model_info().get_params()._hidden.length-1),"");
final int len = _output.nfeatures();
Vec resp = null;
if (isSupervised()) {
int ridx = frame.find(_output.responseName());
if (ridx != -1) { // drop the response for scoring!
frame = new Frame(frame);
resp = frame.vecs()[ridx];
frame.remove(ridx);
}
}
Frame adaptFrm = new Frame(frame);
//create new features, will be dense
final int features = model_info().get_params()._hidden[layer];
Vec[] vecs = adaptFrm.anyVec().makeZeros(features);
Scope.enter();
adaptTestForTrain(_output._names, null /*don't skip response*/, _output._domains, adaptFrm, _parms.missingColumnsType(), true);
for (int j=0; j<features; ++j) {
adaptFrm.add("DF.L"+(layer+1)+".C" + (j+1), vecs[j]);
}
new MRTask() {
@Override public void map( Chunk chks[] ) {
double tmp [] = new double[len];
final Neurons[] neurons = DeepLearningTask.makeNeuronsForTesting(model_info);
for( int row=0; row<chks[0]._len; row++ ) {
for( int i=0; i<len; i++ )
tmp[i] = chks[i].atd(row);
((Neurons.Input)neurons[0]).setInput(-1, tmp);
DeepLearningTask.step(-1, neurons, model_info, false, null);
float[] out = neurons[layer+1]._a.raw(); //extract the layer-th hidden feature
for( int c=0; c<features; c++ )
chks[_output._names.length+c].set(row,out[c]);
}
}
}.doAll(adaptFrm);
// Return just the output columns
int x=_output._names.length, y=adaptFrm.numCols();
Frame ret = adaptFrm.extractFrame(x, y);
if (resp != null) ret.prepend(_output.responseName(), resp);
Scope.exit();
return ret;
}
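  // Usage sketch (hypothetical helper, not in the original API): materialize the activations of
  // the first hidden layer as dense columns, e.g. to feed a downstream model. Column names follow
  // the "DF.L<layer+1>.C<j+1>" pattern used in scoreDeepFeatures above.
  private Frame exampleFirstHiddenLayerFeatures(Frame data) {
    return scoreDeepFeatures(data, 0);
  }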
// Make (potentially expanded) reconstruction
private float[] score_autoencoder(Chunk[] chks, int row_in_chunk, double[] tmp, float[] preds, Neurons[] neurons) {
assert(get_params()._autoencoder);
assert(tmp.length == _output._names.length);
for( int i=0; i<tmp.length; i++ )
tmp[i] = chks[i].atd(row_in_chunk);
score_autoencoder(tmp, preds, neurons); // this fills preds, returns MSE error (ignored here)
return preds;
}
/**
* Helper to reconstruct original data into preds array and compute the reconstruction error (MSE)
* @param data Original data (unexpanded)
* @param preds Reconstruction (potentially expanded)
* @return reconstruction error
*/
private double score_autoencoder(double[] data, float[] preds, Neurons[] neurons) {
assert(model_info().get_params()._autoencoder);
if (model_info().unstable()) {
Log.warn(unstable_msg);
throw new UnsupportedOperationException("Trying to predict with an unstable model.");
}
((Neurons.Input)neurons[0]).setInput(-1, data); // expands categoricals inside
DeepLearningTask.step(-1, neurons, model_info, false, null); // reconstructs data in expanded space
float[] in = neurons[0]._a.raw(); //input (expanded)
float[] out = neurons[neurons.length - 1]._a.raw(); //output (expanded)
assert(in.length == out.length);
// First normalize categorical reconstructions to be probabilities
// (such that they can be better compared to the input where one factor was 1 and the rest was 0)
// model_info().data_info().softMaxCategoricals(out,out); //only modifies the categoricals
// Compute MSE of reconstruction in expanded space (with categorical probabilities)
double l2 = 0;
for (int i = 0; i < in.length; ++i)
l2 += Math.pow((out[i] - in[i]), 2);
l2 /= in.length;
if (preds!=null) {
// Now scale back numerical columns to original data space (scale + shift)
model_info().data_info().unScaleNumericals(out, out); //only modifies the numericals
System.arraycopy(out, 0, preds, 0, out.length); //copy reconstruction into preds
}
return l2;
}
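  // Minimal sketch of the reconstruction-error computation above, factored out for clarity
  // (not used by the original code): the mean squared difference between the expanded input
  // and its reconstruction.
  private static double exampleReconstructionMSE(float[] in, float[] out) {
    assert in.length == out.length;
    double l2 = 0;
    for (int i = 0; i < in.length; ++i)
      l2 += (out[i] - in[i]) * (out[i] - in[i]);
    return l2 / in.length;
  }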
/**
* Compute quantile-based threshold (in reconstruction error) to find outliers
* @param mse Vector containing reconstruction errors
* @param quantile Quantile for cut-off
* @return Threshold in MSE value for a point to be above the quantile
*/
public double calcOutlierThreshold(Vec mse, double quantile) {
Frame mse_frame = new Frame(Key.make(), new String[]{"Reconstruction.MSE"}, new Vec[]{mse});
DKV.put(mse_frame._key, mse_frame);
QuantileModel.QuantileParameters parms = new QuantileModel.QuantileParameters();
parms._train = mse_frame._key;
parms._probs = new double[]{quantile};
Quantile job = new Quantile(parms).trainModel();
QuantileModel kmm = job.get();
job.remove();
double q = kmm._output._quantiles[0][0];
kmm.delete();
DKV.remove(mse_frame._key);
return q;
}
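  // Usage sketch (hypothetical helper, not in the original API): combine scoreAutoEncoder() and
  // calcOutlierThreshold() to derive an anomaly cut-off at the 95th percentile of the per-row
  // reconstruction MSE. The caller owns (and here deletes) the intermediate MSE frame.
  private double exampleOutlierCutoff(Frame newData) {
    Frame mse = scoreAutoEncoder(newData, Key.make());
    double threshold = calcOutlierThreshold(mse.vecs()[0], 0.95);
    mse.delete();
    return threshold;
  }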
// helper to push this model to another key (for keeping good models)
private void putMeAsBestModel(Key bestModelKey) {
DeepLearningModel bestModel = new DeepLearningModel(bestModelKey, null, this, true, model_info().data_info());
DKV.put(bestModel._key, bestModel);
assert (DKV.get(bestModelKey) != null);
assert (bestModel.compareTo(this) <= 0);
}
@Override public void delete() {
if (_output.weights != null && _output.biases != null) {
for (Key k : _output.weights) {
if (DKV.getGet(k) != null) ((Frame) DKV.getGet(k)).delete();
}
for (Key k : _output.biases) {
if (DKV.getGet(k) != null) ((Frame) DKV.getGet(k)).delete();
}
}
super.delete();
}
void delete_xval_models( ) {
// if (get_params().xval_models != null) {
// for (Key k : get_params().xval_models) {
// DKV.get(k).<DeepLearningModel>get().delete_best_model();
// DKV.get(k).<DeepLearningModel>get().delete();
// }
// }
}
private String getHeader() {
assert get_params()._autoencoder;
StringBuilder sb = new StringBuilder();
final int len = model_info().data_info().fullN();
String prefix = "reconstr_";
assert (model_info().data_info()._responses == 0);
String[] coefnames = model_info().data_info().coefNames();
assert (len == coefnames.length);
for (int c = 0; c < len; c++) {
if (c>0) sb.append(",");
sb.append(prefix + coefnames[c]);
}
return sb.toString();
}
@Override protected SB toJavaInit(SB sb, SB fileContextSB) {
sb = super.toJavaInit(sb, fileContextSB);
String mname = JCodeGen.toJavaId(_key.toString());
Neurons[] neurons = DeepLearningTask.makeNeuronsForTesting(model_info());
final DeepLearningParameters p = model_info.get_params();
sb.ip("public boolean isSupervised() { return " + isSupervised() + "; }").nl();
sb.ip("public int nfeatures() { return "+_output.nfeatures()+"; }").nl();
sb.ip("public int nclasses() { return "+ (p._autoencoder ? neurons[neurons.length-1].units : _output.nclasses()) + "; }").nl();
sb.ip("public ModelCategory getModelCategory() { return ModelCategory."+_output.getModelCategory()+"; }").nl();
if (model_info().data_info()._nums > 0) {
JCodeGen.toStaticVar(sb, "NUMS", new double[model_info().data_info()._nums], "Workspace for storing numerical input variables.");
JCodeGen.toStaticVar(sb, "NORMMUL", model_info().data_info()._normMul, "Standardization/Normalization scaling factor for numerical variables.");
JCodeGen.toStaticVar(sb, "NORMSUB", model_info().data_info()._normSub, "Standardization/Normalization offset for numerical variables.");
}
if (model_info().data_info()._cats > 0) {
JCodeGen.toStaticVar(sb, "CATS", new int[model_info().data_info()._cats], "Workspace for storing categorical input variables.");
}
JCodeGen.toStaticVar(sb, "CATOFFSETS", model_info().data_info()._catOffsets, "Workspace for categorical offsets.");
if (model_info().data_info()._normRespMul != null) {
JCodeGen.toStaticVar(sb, "NORMRESPMUL", model_info().data_info()._normRespMul, "Standardization/Normalization scaling factor for response.");
JCodeGen.toStaticVar(sb, "NORMRESPSUB", model_info().data_info()._normRespSub, "Standardization/Normalization offset for response.");
}
if (p._hidden_dropout_ratios != null) {
JCodeGen.toStaticVar(sb, "HIDDEN_DROPOUT_RATIOS", p._hidden_dropout_ratios, "Hidden layer dropout ratios.");
}
int[] layers = new int[neurons.length];
for (int i=0;i<neurons.length;++i)
layers[i] = neurons[i].units;
JCodeGen.toStaticVar(sb, "NEURONS", layers, "Number of neurons for each layer.");
if (get_params()._autoencoder) {
sb.i(1).p("public int getPredsSize() { return " + model_info.units[model_info.units.length-1] + "; }").nl();
sb.i(1).p("public boolean isAutoEncoder() { return true; }").nl();
sb.i(1).p("public String getHeader() { return \"" + getHeader() + "\"; }").nl();
}
// activation storage
sb.i(1).p("// Storage for neuron activation values.").nl();
sb.i(1).p("public static final float[][] ACTIVATION = new float[][] {").nl();
for (int i=0; i<neurons.length; i++) {
String colInfoClazz = mname + "_Activation_"+i;
sb.i(2).p("/* ").p(neurons[i].getClass().getSimpleName()).p(" */ ");
sb.p(colInfoClazz).p(".VALUES");
if (i!=neurons.length-1) sb.p(',');
sb.nl();
fileContextSB.i().p("// Neuron activation values for ").p(neurons[i].getClass().getSimpleName()).p(" layer").nl();
JCodeGen.toClassWithArray(fileContextSB, null, colInfoClazz, new float[layers[i]]);
}
sb.i(1).p("};").nl();
// biases
sb.i(1).p("// Neuron bias values.").nl();
sb.i(1).p("public static final float[][] BIAS = new float[][] {").nl();
for (int i=0; i<neurons.length; i++) {
String colInfoClazz = mname + "_Bias_"+i;
sb.i(2).p("/* ").p(neurons[i].getClass().getSimpleName()).p(" */ ");
sb.p(colInfoClazz).p(".VALUES");
if (i!=neurons.length-1) sb.p(',');
sb.nl();
fileContextSB.i().p("// Neuron bias values for ").p(neurons[i].getClass().getSimpleName()).p(" layer").nl();
float[] bias = i == 0 ? null : new float[model_info().get_biases(i-1).size()];
if (i>0) {
for (int j=0; j<bias.length; ++j) bias[j] = model_info().get_biases(i-1).get(j);
}
JCodeGen.toClassWithArray(fileContextSB, null, colInfoClazz, bias);
}
sb.i(1).p("};").nl();
// weights
sb.i(1).p("// Connecting weights between neurons.").nl();
sb.i(1).p("public static final float[][] WEIGHT = new float[][] {").nl();
for (int i=0; i<neurons.length; i++) {
String colInfoClazz = mname + "_Weight_"+i;
sb.i(2).p("/* ").p(neurons[i].getClass().getSimpleName()).p(" */ ");
sb.p(colInfoClazz).p(".VALUES");
if (i!=neurons.length-1) sb.p(',');
sb.nl();
if (i > 0) {
fileContextSB.i().p("// Neuron weights connecting ").
p(neurons[i - 1].getClass().getSimpleName()).p(" and ").
p(neurons[i].getClass().getSimpleName()).
p(" layer").nl();
}
float[] weights = i == 0 ? null : new float[model_info().get_weights(i-1).rows()*model_info().get_weights(i-1).cols()];
if (i>0) {
final int rows = model_info().get_weights(i-1).rows();
final int cols = model_info().get_weights(i-1).cols();
for (int j=0; j<rows; ++j)
for (int k=0; k<cols; ++k)
weights[j*cols+k] = model_info().get_weights(i-1).get(j,k);
}
JCodeGen.toClassWithArray(fileContextSB, null, colInfoClazz, weights);
}
sb.i(1).p("};").nl();
return sb;
}
@Override protected boolean toJavaCheckTooBig() { return (model_info.size() > 1e6); }
@Override protected void toJavaPredictBody( final SB bodySb, final SB classCtxSb, final SB fileCtxSb) {
SB model = new SB();
final DeepLearningParameters p = model_info.get_params();
bodySb.i().p("java.util.Arrays.fill(preds,0);").nl();
final int cats = model_info().data_info()._cats;
final int nums = model_info().data_info()._nums;
// initialize input layer
if (nums > 0) bodySb.i().p("java.util.Arrays.fill(NUMS,0f);").nl();
if (cats > 0) bodySb.i().p("java.util.Arrays.fill(CATS,0);").nl();
bodySb.i().p("int i = 0, ncats = 0;").nl();
if (cats > 0) {
bodySb.i().p("for(; i<"+cats+"; ++i) {").nl();
bodySb.i(1).p("if (!Double.isNaN(data[i])) {").nl();
bodySb.i(2).p("int c = (int) data[i];").nl();
if (model_info().data_info()._useAllFactorLevels)
bodySb.i(2).p("CATS[ncats++] = c + CATOFFSETS[i];").nl();
else
bodySb.i(2).p("if (c != 0) CATS[ncats++] = c + CATOFFSETS[i] - 1;").nl();
bodySb.i(1).p("}").nl();
bodySb.i().p("}").nl();
}
if (nums > 0) {
bodySb.i().p("final int n = data.length;").nl();
bodySb.i().p("for(; i<n; ++i) {").nl();
bodySb.i(1).p("NUMS[i" + (cats > 0 ? "-" + cats : "") + "] = Double.isNaN(data[i]) ? 0 : ");
if (model_info().data_info()._normMul != null) {
bodySb.p("(data[i] - NORMSUB[i" + (cats > 0 ? "-" + cats : "") + "])*NORMMUL[i" + (cats > 0 ? "-" + cats : "") + "];").nl();
} else {
bodySb.p("data[i];").nl();
}
bodySb.i(0).p("}").nl();
}
bodySb.i().p("java.util.Arrays.fill(ACTIVATION[0],0);").nl();
if (cats > 0) {
bodySb.i().p("for (i=0; i<ncats; ++i) ACTIVATION[0][CATS[i]] = 1f;").nl();
}
if (nums > 0) {
bodySb.i().p("for (i=0; i<NUMS.length; ++i) {").nl();
bodySb.i(1).p("ACTIVATION[0][CATOFFSETS[CATOFFSETS.length-1] + i] = Double.isNaN(NUMS[i]) ? 0f : (float) NUMS[i];").nl();
bodySb.i().p("}").nl();
}
boolean tanh=(p._activation == DeepLearningParameters.Activation.Tanh || p._activation == DeepLearningParameters.Activation.TanhWithDropout);
boolean relu=(p._activation == DeepLearningParameters.Activation.Rectifier || p._activation == DeepLearningParameters.Activation.RectifierWithDropout);
boolean maxout=(p._activation == DeepLearningParameters.Activation.Maxout || p._activation == DeepLearningParameters.Activation.MaxoutWithDropout);
final String stopping = p._autoencoder ? "(i<=ACTIVATION.length-1)" : "(i<ACTIVATION.length-1)";
// make prediction: forward propagation
bodySb.i().p("for (i=1; i<ACTIVATION.length; ++i) {").nl();
bodySb.i(1).p("java.util.Arrays.fill(ACTIVATION[i],0f);").nl();
if (maxout) {
bodySb.i(1).p("float rmax = 0;").nl();
bodySb.i(1).p("for (int r=0; r<ACTIVATION[i].length; ++r) {").nl();
bodySb.i(2).p("final int cols = ACTIVATION[i-1].length;").nl();
bodySb.i(2).p("float cmax = Float.NEGATIVE_INFINITY;").nl();
bodySb.i(2).p("for (int c=0; c<cols; ++c) {").nl();
bodySb.i(3).p("if " + stopping + " cmax = Math.max(ACTIVATION[i-1][c] * WEIGHT[i][r*cols+c], cmax);").nl();
bodySb.i(3).p("else ACTIVATION[i][r] += ACTIVATION[i-1][c] * WEIGHT[i][r*cols+c];").nl();
bodySb.i(2).p("}").nl();
bodySb.i(2).p("if "+ stopping +" ACTIVATION[i][r] = Float.isInfinite(cmax) ? 0f : cmax;").nl();
bodySb.i(2).p("ACTIVATION[i][r] += BIAS[i][r];").nl();
bodySb.i(2).p("if " + stopping + " rmax = Math.max(rmax, ACTIVATION[i][r]);").nl();
bodySb.i(1).p("}").nl();
bodySb.i(2).p("for (int r=0; r<ACTIVATION[i].length; ++r) {").nl();
bodySb.i(3).p("if (rmax > 1 ) ACTIVATION[i][r] /= rmax;").nl();
} else {
// optimized
bodySb.i(1).p("int cols = ACTIVATION[i-1].length;").nl();
bodySb.i(1).p("int rows = ACTIVATION[i].length;").nl();
bodySb.i(1).p("int extra=cols-cols%8;").nl();
bodySb.i(1).p("int multiple = (cols/8)*8-1;").nl();
bodySb.i(1).p("int idx = 0;").nl();
bodySb.i(1).p("float[] a = WEIGHT[i];").nl();
bodySb.i(1).p("float[] x = ACTIVATION[i-1];").nl();
bodySb.i(1).p("float[] y = BIAS[i];").nl();
bodySb.i(1).p("float[] res = ACTIVATION[i];").nl();
bodySb.i(1).p("for (int row=0; row<rows; ++row) {").nl();
bodySb.i(2).p("float psum0 = 0, psum1 = 0, psum2 = 0, psum3 = 0, psum4 = 0, psum5 = 0, psum6 = 0, psum7 = 0;").nl();
bodySb.i(2).p("for (int col = 0; col < multiple; col += 8) {").nl();
bodySb.i(3).p("int off = idx + col;").nl();
bodySb.i(3).p("psum0 += a[off ] * x[col ];").nl();
bodySb.i(3).p("psum1 += a[off + 1] * x[col + 1];").nl();
bodySb.i(3).p("psum2 += a[off + 2] * x[col + 2];").nl();
bodySb.i(3).p("psum3 += a[off + 3] * x[col + 3];").nl();
bodySb.i(3).p("psum4 += a[off + 4] * x[col + 4];").nl();
bodySb.i(3).p("psum5 += a[off + 5] * x[col + 5];").nl();
bodySb.i(3).p("psum6 += a[off + 6] * x[col + 6];").nl();
bodySb.i(3).p("psum7 += a[off + 7] * x[col + 7];").nl();
bodySb.i(2).p("}").nl();
bodySb.i(2).p("res[row] += psum0 + psum1 + psum2 + psum3;").nl();
bodySb.i(2).p("res[row] += psum4 + psum5 + psum6 + psum7;").nl();
bodySb.i(2).p("for (int col = extra; col < cols; col++)").nl();
bodySb.i(3).p("res[row] += a[idx + col] * x[col];").nl();
bodySb.i(2).p("res[row] += y[row];").nl();
bodySb.i(2).p("idx += cols;").nl();
bodySb.i(1).p("}").nl();
// Activation function
bodySb.i(1).p("if " + stopping + " {").nl();
bodySb.i(2).p("for (int r=0; r<ACTIVATION[i].length; ++r) {").nl();
if (tanh) {
bodySb.i(3).p("ACTIVATION[i][r] = 1f - 2f / (1f + (float)Math.exp(2*ACTIVATION[i][r]));").nl();
} else if (relu) {
bodySb.i(3).p("ACTIVATION[i][r] = Math.max(0f, ACTIVATION[i][r]);").nl();
}
}
if (p._hidden_dropout_ratios != null) {
bodySb.i(3).p("if (i<ACTIVATION.length-1) {").nl();
bodySb.i(4).p("ACTIVATION[i][r] *= HIDDEN_DROPOUT_RATIOS[i-1];").nl();
bodySb.i(3).p("}").nl();
}
// if (maxout) bodySb.i(1).p("}").nl();
bodySb.i(2).p("}").nl();
if (!maxout) bodySb.i(1).p("}").nl();
if (_output.isClassifier()) {
bodySb.i(1).p("if (i == ACTIVATION.length-1) {").nl();
// softmax
bodySb.i(2).p("float max = ACTIVATION[i][0];").nl();
bodySb.i(2).p("for (int r=1; r<ACTIVATION[i].length; r++) {").nl();
bodySb.i(3).p("if (ACTIVATION[i][r]>max) max = ACTIVATION[i][r];").nl();
bodySb.i(2).p("}").nl();
bodySb.i(2).p("float scale = 0f;").nl();
bodySb.i(2).p("for (int r=0; r<ACTIVATION[i].length; r++) {").nl();
bodySb.i(3).p("ACTIVATION[i][r] = (float) Math.exp(ACTIVATION[i][r] - max);").nl();
bodySb.i(3).p("scale += ACTIVATION[i][r];").nl();
bodySb.i(2).p("}").nl();
bodySb.i(2).p("for (int r=0; r<ACTIVATION[i].length; r++) {").nl();
bodySb.i(3).p("if (Float.isNaN(ACTIVATION[i][r]))").nl();
bodySb.i(4).p("throw new RuntimeException(\"Numerical instability, predicted NaN.\");").nl();
bodySb.i(3).p("ACTIVATION[i][r] /= scale;").nl();
bodySb.i(3).p("preds[r+1] = ACTIVATION[i][r];").nl();
bodySb.i(2).p("}").nl();
bodySb.i(1).p("}").nl();
bodySb.i().p("}").nl();
} else if (!p._autoencoder) { //Regression
bodySb.i(1).p("if (i == ACTIVATION.length-1) {").nl();
// regression: set preds[1], FillPreds0 will put it into preds[0]
if (model_info().data_info()._normRespMul != null) {
bodySb.i(2).p("preds[1] = (ACTIVATION[i][0] / NORMRESPMUL[0] + NORMRESPSUB[0]);").nl();
}
else {
bodySb.i(2).p("preds[1] = ACTIVATION[i][0];").nl();
}
bodySb.i(2).p("if (Double.isNaN(preds[1])) throw new RuntimeException(\"Predicted regression target NaN!\");").nl();
bodySb.i(1).p("}").nl();
bodySb.i().p("}").nl();
} else { //AutoEncoder
bodySb.i(1).p("if (i == ACTIVATION.length-1) {").nl();
bodySb.i(2).p("for (int r=0; r<ACTIVATION[i].length; r++) {").nl();
bodySb.i(3).p("if (Float.isNaN(ACTIVATION[i][r]))").nl();
bodySb.i(4).p("throw new RuntimeException(\"Numerical instability, reconstructed NaN.\");").nl();
bodySb.i(3).p("preds[r] = ACTIVATION[i][r];").nl();
bodySb.i(2).p("}").nl();
if (model_info().data_info()._nums > 0) {
int ns = model_info().data_info().numStart();
bodySb.i(2).p("for (int k=" + ns + "; k<" + model_info().data_info().fullN() + "; ++k) {").nl();
bodySb.i(3).p("preds[k] = preds[k] / NORMMUL[k-" + ns + "] + NORMSUB[k-" + ns + "];").nl();
bodySb.i(2).p("}").nl();
}
bodySb.i(1).p("}").nl();
bodySb.i().p("}").nl();
// DEBUGGING
// bodySb.i().p("System.out.println(java.util.Arrays.toString(data));").nl();
// bodySb.i().p("System.out.println(java.util.Arrays.toString(ACTIVATION[0]));").nl();
// bodySb.i().p("System.out.println(java.util.Arrays.toString(ACTIVATION[ACTIVATION.length-1]));").nl();
// bodySb.i().p("System.out.println(java.util.Arrays.toString(preds));").nl();
// bodySb.i().p("System.out.println(\"\");").nl();
}
fileCtxSb.p(model);
if (_output.autoencoder) return;
if (_output.isClassifier()) {
if (_parms._balance_classes)
bodySb.ip("hex.genmodel.GenModel.correctProbabilities(preds, PRIOR_CLASS_DISTRIB, MODEL_CLASS_DISTRIB);").nl();
bodySb.ip("preds[0] = hex.genmodel.GenModel.getPrediction(preds, data, " + defaultThreshold()+");").nl();
} else {
bodySb.ip("preds[0] = (float)preds[1];").nl();
}
}
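  // For reference (illustrative only, neither emitted nor called by the generator): the unrolled
  // loop written out by toJavaPredictBody computes, per output neuron,
  // res[row] = BIAS[i][row] + dot(row of WEIGHT[i], ACTIVATION[i-1]); a straightforward version is:
  static float exampleDenseNeuron(float[] weightRow, float[] prevActivation, float bias) {
    float sum = bias;
    for (int c = 0; c < prevActivation.length; ++c)
      sum += weightRow[c] * prevActivation[c];
    return sum;
  }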
transient private final String unstable_msg = "Job was aborted due to observed numerical instability (exponential growth)."
+ "\nTry a different initial distribution, a bounded activation function or adding"
+ "\nregularization with L1, L2 or max_w2 and/or use a smaller learning rate or faster annealing.";
@Override protected long checksum_impl() {
return super.checksum_impl() * model_info.checksum_impl();
}
}
|
package hudson.plugins.scm_sync_configuration.repository;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertThat;
import hudson.model.Hudson;
import hudson.plugins.scm_sync_configuration.ScmSyncConfigurationBusiness;
import hudson.plugins.scm_sync_configuration.ScmSyncConfigurationPlugin;
import hudson.plugins.scm_sync_configuration.model.ScmContext;
import hudson.plugins.scm_sync_configuration.scms.SCM;
import hudson.plugins.scm_sync_configuration.util.ScmSyncConfigurationBaseTest;
import java.io.File;
import org.codehaus.plexus.util.FileUtils;
import org.junit.Before;
import org.junit.Test;
import org.powermock.core.classloader.annotations.PrepareForTest;
@PrepareForTest(SCM.class)
public class InitRepositoryTest extends ScmSyncConfigurationBaseTest {
private ScmSyncConfigurationBusiness sscBusiness;
@Before
public void initBusiness() throws Throwable{
this.sscBusiness = new ScmSyncConfigurationBusiness();
}
@Test
public void shouldNotInitializeAnyRepositoryWhenScmContextIsEmpty() throws Throwable {
ScmContext emptyContext = new ScmContext(null, null);
sscBusiness.init(emptyContext);
assertThat(sscBusiness.scmCheckoutDirectorySettledUp(emptyContext), is(false));
emptyContext = new ScmContext(null, getSCMRepositoryURL());
sscBusiness.init(emptyContext);
assertThat(sscBusiness.scmCheckoutDirectorySettledUp(emptyContext), is(false));
SCM mockedSCM = createSCMMock(true);
emptyContext = new ScmContext(mockedSCM, null);
sscBusiness.init(emptyContext);
assertThat(sscBusiness.scmCheckoutDirectorySettledUp(emptyContext), is(false));
}
@Test
public void shouldInitializeLocalRepositoryWhenScmContextIsCorrect() throws Throwable {
SCM mockedSCM = createSCMMock(true);
ScmContext scmContext = new ScmContext(mockedSCM, getSCMRepositoryURL());
sscBusiness.init(scmContext);
assertThat(sscBusiness.scmCheckoutDirectorySettledUp(scmContext), is(true));
}
@Test
public void shouldInitializeLocalRepositoryWhenScmContextIsCorrectAndEvenIfScmDirectoryDoesntExist() throws Throwable {
SCM mockedSCM = createSCMMock(true);
ScmContext scmContext = new ScmContext(mockedSCM, getSCMRepositoryURL()+"/path/that/doesnt/exist/");
sscBusiness.init(scmContext);
assertThat(sscBusiness.scmCheckoutDirectorySettledUp(scmContext), is(true));
}
@Test
public void shouldResetCheckoutConfigurationDirectoryWhenAsked() throws Throwable {
// Initializing the repository...
SCM mockedSCM = createSCMMock(true);
ScmContext scmContext = new ScmContext(mockedSCM, getSCMRepositoryURL());
sscBusiness.init(scmContext);
// After init, the local checked-out repository should exist
assertThat(getCurrentScmSyncConfigurationCheckoutDirectory().exists(), is(true));
// Populating the checked-out configuration directory...
File fileWhichShouldBeDeletedAfterReset = new File(getCurrentScmSyncConfigurationCheckoutDirectory().getAbsolutePath()+"/hello.txt");
assertThat(fileWhichShouldBeDeletedAfterReset.createNewFile(), is(true));
FileUtils.fileWrite(fileWhichShouldBeDeletedAfterReset.getAbsolutePath(), "Hello world !");
// Resetting the repository without cleanup
sscBusiness.initializeRepository(scmContext, false);
assertThat(fileWhichShouldBeDeletedAfterReset.exists(), is(true));
// Resetting the repository with cleanup
sscBusiness.initializeRepository(scmContext, true);
assertThat(fileWhichShouldBeDeletedAfterReset.exists(), is(false));
}
@Test
public void shouldSynchronizeHudsonFiles() throws Throwable {
// Initializing the repository...
SCM mockedSCM = createSCMMock(true);
ScmContext scmContext = new ScmContext(mockedSCM, getSCMRepositoryURL());
sscBusiness.init(scmContext);
// Synchronizing hudson config files
sscBusiness.synchronizeAllConfigs(scmContext, ScmSyncConfigurationPlugin.AVAILABLE_STRATEGIES, Hudson.getInstance().getMe());
verifyCurrentScmContentMatchesHierarchy("expected-scm-hierarchies/InitRepositoryTest.shouldSynchronizeHudsonFiles/");
}
}
|
package org.jeo.map;
import java.lang.reflect.Array;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
/**
* A styling rule.
*
* @author Justin Deoliveira, OpenGeo
*
*/
public class Rule {
/**
* ordered parts of the rule, composed of other rules, including self
*/
List<Rule> parts = new ArrayList<Rule>();
/**
* style properties of the rule
*/
Map<String,Object> props = null;
/**
* rule selectors
*/
List<Selector> selectors = new ArrayList<Selector>();
public Map<String, Object> properties() {
if (props == null) {
return Collections.emptyMap();
}
return props();
}
public List<Selector> getSelectors() {
return selectors;
}
public List<Rule> nested() {
List<Rule> nested = new ArrayList<Rule>();
for (Rule r : parts) {
if (r != this) {
nested.add(r);
}
}
return nested;
}
public Object get(String key) {
return get(key, null);
}
public Object get(String key, Object def) {
if (props == null) {
return def;
}
Map<String,Object> props = props();
return props.containsKey(key) ? props.get(key) : def;
}
public void put(String key, Object val) {
props().put(key, val);
}
public void putAll(Map<String, Object> map) {
if (map == null || map.isEmpty()) {
return;
}
props().putAll(map);
}
public void add(Rule rule) {
parts.add(rule);
}
public RGB color(String key, RGB def) {
Object obj = get(key, def);
return obj != null ? toRGB(obj) : null;
}
public String string(String key, String def) {
Object obj = get(key, def);
return obj != null ? obj.toString() : null;
}
public double number(String key, double def) {
return toDouble(get(key, def));
}
public float number(String key, float def) {
return (float) number(key, (double) def);
}
public double[] numbers(String key, double... def) {
Object obj = get(key, def);
if (obj == null) {
return null;
}
if (obj instanceof double[]) {
return (double[]) obj;
}
if (obj.getClass().isArray()) {
int n = Array.getLength(obj);
double[] d = new double[n];
for (int i = 0; i < n; i++) {
d[i] = toDouble(Array.get(obj, i));
}
return d;
}
String s = obj.toString();
if (s.contains(" ")) {
return toDoubles(s, " ");
}
else if (s.contains(",")) {
return toDoubles(s, ",");
}
//TODO: attempt to convert from string delimited by ' ' or ','
throw new IllegalArgumentException("Unable to convert " + obj + " to array");
}
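  // Illustration (hypothetical property name, not part of the original class): numbers() accepts
  // a double[] directly, any other array type, or a space- or comma-delimited string, so a value
  // stored as "5 2" comes back as {5.0, 2.0}.
  static double[] exampleDashArray() {
    Rule r = new Rule();
    r.put("line-dasharray", "5 2");
    return r.numbers("line-dasharray");
  }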
protected Map<String,Object> props() {
if (props == null) {
props = new LinkedHashMap<String, Object>();
parts.add(this);
}
return props;
}
protected double toDouble(Object obj) {
if (obj == null) {
return Double.NaN;
}
if (obj instanceof Number) {
return ((Number)obj).doubleValue();
}
return Double.parseDouble(obj.toString());
}
protected double[] toDoubles(String s, String delim) {
String[] split = s.split(delim);
double[] d = new double[split.length];
for (int i = 0; i < d.length; i++) {
d[i] = toDouble(split[i].trim());
}
return d;
}
protected RGB toRGB(Object obj) {
if (obj == null) {
return null;
}
if (obj instanceof RGB) {
return (RGB) obj;
}
return new RGB(obj.toString());
}
/**
* Flattens the rule by merging the top level rule with all nested rules.
*/
public List<Rule> flatten() {
//TODO: multiple levels of nesting?
List<Rule> flat = new ArrayList<Rule>();
for (Rule r : parts) {
if (r != this) {
r = merge(r);
}
flat.add(r);
}
return flat;
}
/**
* Merges this rule with another rule, resulting in a new rule object.
* <p>
* Properties defined by both this rule and <tt>other</tt> take their value from <tt>other</tt>;
* the selectors of the result are the cross product of both rules' selectors.
* </p>
*/
public Rule merge(Rule other) {
Rule merged = new Rule();
//merge the properties
merged.putAll(properties());
merged.putAll(other.properties());
//cross product all selectors
List<Selector> selectors = new ArrayList<Selector>();
for (Selector s1 : getSelectors()) {
for (Selector s2 : other.getSelectors()) {
selectors.add(s1.merge(s2));
}
}
merged.getSelectors().addAll(selectors);
return merged;
}
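  // Usage sketch (illustrative only; "line-width" is just an example property name): merging a
  // rule with an override keeps both property sets, with the other rule winning on conflicts --
  // the behaviour flatten() relies on when collapsing nested rules.
  static double exampleMergedLineWidth() {
    Rule base = new Rule();
    base.put("line-width", 1.0);
    Rule override = new Rule();
    override.put("line-width", 2.5);
    return base.merge(override).number("line-width", 0.0); // 2.5
  }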
@Override
public String toString() {
return toString(0);
}
public String toString(int indent) {
StringBuilder sb = new StringBuilder();
String pad = ""; for (int i = 0; i < indent; i++) { pad += " "; };
sb.append(pad);
for (Selector s : getSelectors()) {
sb.append(s).append(",");
}
if (sb.length() > 0) {
sb.setLength(sb.length()-1);
}
sb.append(pad).append("{").append("\n");
for (Map.Entry<String, Object> e : properties().entrySet()) {
sb.append(pad).append(" ").append(e.getKey()).append(": ").append(e.getValue()).append(";\n");
}
for (Rule nested : nested()) {
sb.append(nested.toString(indent+2)).append("\n");
}
sb.append(pad).append("}");
return sb.toString();
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((props == null) ? 0 : props.hashCode());
result = prime * result + ((parts == null) ? 0 : parts.hashCode());
result = prime * result + ((selectors == null) ? 0 : selectors.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
Rule other = (Rule) obj;
if (parts == null) {
if (other.parts != null)
return false;
} else if (!parts.equals(other.parts))
return false;
if (props == null) {
if (other.props != null)
return false;
} else if (!props.equals(other.props))
return false;
if (selectors == null) {
if (other.selectors != null)
return false;
} else if (!selectors.equals(other.selectors))
return false;
return true;
}
}
|
package org.apache.commons.collections;
import junit.framework.*;
import java.util.Arrays;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Comparator;
import java.util.Iterator;
import java.util.NoSuchElementException;
import org.apache.commons.collections.comparators.ComparableComparator;
import org.apache.commons.collections.comparators.ReverseComparator;
/**
* Tests the BinaryHeap.
*
* @author <a href="mailto:mas@apache.org">Michael A. Smith</a>
* @version $Id: TestBinaryHeap.java,v 1.5 2002/08/17 12:07:24 scolebourne Exp $
*/
public class TestBinaryHeap extends TestCollection {
public static Test suite() {
return new TestSuite(TestBinaryHeap.class);
}
public TestBinaryHeap(String testName) {
super(testName);
}
/**
* Return a new, empty {@link Collection} to be used for testing.
*/
public Collection makeCollection() {
return new BinaryHeap();
}
public Collection makeConfirmedCollection() {
return new ArrayList();
}
public Collection makeConfirmedFullCollection() {
ArrayList list = new ArrayList();
list.addAll(Arrays.asList(getFullElements()));
return list;
}
public Object[] getFullElements() {
return getFullNonNullStringElements();
}
public Object[] getOtherElements() {
return getOtherNonNullStringElements();
}
public void testCollectionIteratorFailFast() {
}
public void testBasicOps() {
BinaryHeap heap = new BinaryHeap();
assertTrue("heap should be empty after create", heap.isEmpty());
try {
heap.peek();
fail("NoSuchElementException should be thrown if peek is called " +
"before any elements are inserted");
} catch (NoSuchElementException e) {
// expected
}
try {
heap.pop();
fail("NoSuchElementException should be thrown if pop is called " +
"before any elements are inserted");
} catch (NoSuchElementException e) {
// expected
}
heap.insert("a");
heap.insert("c");
heap.insert("e");
heap.insert("b");
heap.insert("d");
heap.insert("n");
heap.insert("m");
heap.insert("l");
heap.insert("k");
heap.insert("j");
heap.insert("i");
heap.insert("h");
heap.insert("g");
heap.insert("f");
assertTrue("heap should not be empty after inserts", !heap.isEmpty());
for(int i = 0; i < 14; i++) {
assertEquals("peek using default constructor should return " +
"minimum value in the binary heap",
String.valueOf((char)('a' + i)), heap.peek());
assertEquals("pop using default constructor should return minimum " +
"value in the binary heap",
String.valueOf((char)('a' + i)), heap.pop());
if(i + 1 < 14) {
assertTrue("heap should not be empty before all elements are popped",
!heap.isEmpty());
} else {
assertTrue("heap should be empty after all elements are popped",
heap.isEmpty());
}
}
try {
heap.peek();
fail("NoSuchElementException should be thrown if peek is called " +
"after all elements are popped");
} catch (NoSuchElementException e) {
// expected
}
try {
heap.pop();
fail("NoSuchElementException should be thrown if pop is called " +
"after all elements are popped");
} catch (NoSuchElementException e) {
// expected
}
}
public void testBasicComparatorOps() {
BinaryHeap heap =
new BinaryHeap(new ReverseComparator(new ComparableComparator()));
assertTrue("heap should be empty after create", heap.isEmpty());
try {
heap.peek();
fail("NoSuchElementException should be thrown if peek is called " +
"before any elements are inserted");
} catch (NoSuchElementException e) {
// expected
}
try {
heap.pop();
fail("NoSuchElementException should be thrown if pop is called " +
"before any elements are inserted");
} catch (NoSuchElementException e) {
// expected
}
heap.insert("a");
heap.insert("c");
heap.insert("e");
heap.insert("b");
heap.insert("d");
heap.insert("n");
heap.insert("m");
heap.insert("l");
heap.insert("k");
heap.insert("j");
heap.insert("i");
heap.insert("h");
heap.insert("g");
heap.insert("f");
assertTrue("heap should not be empty after inserts", !heap.isEmpty());
for(int i = 0; i < 14; i++) {
// note: since we're using a comparator that reverses items, the
// "minimum" item is "n", and the "maximum" item is "a".
assertEquals("peek using reverse comparator should return the " +
"heap's minimum (the natural-order maximum) value",
String.valueOf((char)('n' - i)), heap.peek());
assertEquals("pop using reverse comparator should return the " +
"heap's minimum (the natural-order maximum) value",
String.valueOf((char)('n' - i)), heap.pop());
if(i + 1 < 14) {
assertTrue("heap should not be empty before all elements are popped",
!heap.isEmpty());
} else {
assertTrue("heap should be empty after all elements are popped",
heap.isEmpty());
}
}
try {
heap.peek();
fail("NoSuchElementException should be thrown if peek is called " +
"after all elements are popped");
} catch (NoSuchElementException e) {
// expected
}
try {
heap.pop();
fail("NoSuchElementException should be thrown if pop is called " +
"after all elements are popped");
} catch (NoSuchElementException e) {
// expected
}
}
public void verify() {
super.verify();
BinaryHeap heap = (BinaryHeap)collection;
Comparator c = heap.comparator();
if (c == null) c = ComparatorUtils.naturalComparator();
if (!heap.m_isMinHeap) c = ComparatorUtils.reversedComparator(c);
Object[] tree = heap.m_elements;
for (int i = 1; i <= heap.m_size; i++) {
Object parent = tree[i];
if (i * 2 <= heap.m_size) {
assertTrue("Parent is less than or equal to its left child",
c.compare(parent, tree[i * 2]) <= 0);
}
if (i * 2 + 1 < heap.m_size) {
assertTrue("Parent is less than or equal to its right child",
c.compare(parent, tree[i * 2 + 1]) <= 0);
}
}
}
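  // Additional illustration (not in the original test class): the heap-order invariant checked in
  // verify() in miniature -- with the natural-ordering (min-)heap, pop() must return elements in
  // non-decreasing order regardless of insertion order.
  public void testPopReturnsElementsInSortedOrder() {
    BinaryHeap heap = new BinaryHeap();
    String[] input = { "d", "a", "c", "b" };
    for (int i = 0; i < input.length; i++) heap.insert(input[i]);
    Object prev = heap.pop();
    while (!heap.isEmpty()) {
      Object next = heap.pop();
      assertTrue("pop should return elements in non-decreasing order",
                 ((String) prev).compareTo((String) next) <= 0);
      prev = next;
    }
  }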
}
|
package edu.wustl.catissuecore.action;
import java.util.List;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import org.apache.struts.action.ActionForm;
import org.apache.struts.action.ActionForward;
import org.apache.struts.action.ActionMapping;
import edu.wustl.catissuecore.actionForm.UserForm;
import edu.wustl.catissuecore.bizlogic.BizLogicFactory;
import edu.wustl.catissuecore.bizlogic.UserBizLogic;
import edu.wustl.catissuecore.domain.CancerResearchGroup;
import edu.wustl.catissuecore.domain.Department;
import edu.wustl.catissuecore.domain.Institution;
import edu.wustl.catissuecore.domain.User;
import edu.wustl.catissuecore.multiRepository.bean.SiteUserRolePrivilegeBean;
import edu.wustl.catissuecore.util.CaTissuePrivilegeUtility;
import edu.wustl.catissuecore.util.MSRUtil;
import edu.wustl.catissuecore.util.global.Constants;
import edu.wustl.catissuecore.util.global.DefaultValueManager;
import edu.wustl.common.action.SecureAction;
import edu.wustl.common.beans.SessionDataBean;
import edu.wustl.common.bizlogic.IBizLogic;
import edu.wustl.common.cde.CDEManager;
import edu.wustl.common.dao.DAOFactory;
import edu.wustl.common.dao.HibernateDAO;
import edu.wustl.common.security.PrivilegeCache;
import edu.wustl.common.security.PrivilegeManager;
import edu.wustl.common.util.dbManager.DAOException;
import edu.wustl.common.util.logger.Logger;
/**
* This class initializes the fields in the User Add/Edit webpage.
*
* @author gautam_shetty
*/
public class UserAction extends SecureAction {
/**
* Overrides the execute method of Action class. Sets the various fields in
* User Add/Edit webpage.
*/
protected ActionForward executeSecureAction(ActionMapping mapping,
ActionForm form, HttpServletRequest request,
HttpServletResponse response) throws Exception {
// Gets the value of the operation parameter.
String operation = request.getParameter(Constants.OPERATION);
String pageOf = (String) request.getParameter(Constants.PAGEOF);
String reqPath = (String) request.getParameter(Constants.REQ_PATH);
String submittedFor = (String) request
.getAttribute(Constants.SUBMITTED_FOR);
String openInCPFrame = (String) request
.getParameter(Constants.OPEN_PAGE_IN_CPFRAME);
UserForm userForm = (UserForm) form;
// method to get myProfile-Add Privilege
SessionDataBean sessionDataBean=getSessionData(request);
// String readOnlyForPrivOnEdit = "";
// String disablePrivButton = "false";
long loggedInUserId = 0;
if ((Constants.PAGE_OF_USER).equals(pageOf) && sessionDataBean != null
&& sessionDataBean.getUserId() != null)
{
loggedInUserId = sessionDataBean.getUserId();
}
if ((Constants.PAGE_OF_USER).equals(pageOf) && loggedInUserId == userForm.getId())
{
pageOf = Constants.PAGEOF_USER_PROFILE;
// readOnlyForPrivOnEdit = "disabled='true'";
// disablePrivButton ="true";
// request.setAttribute("readOnlyForPrivOnEdit", readOnlyForPrivOnEdit);
// request.setAttribute("disablePrivButton", disablePrivButton);
}
// method to get myProfile end here
//method to preserve data on validation
MSRUtil msrUtil = new MSRUtil();
if (operation.equalsIgnoreCase(Constants.ADD)) {
HttpSession session = request.getSession();
boolean dirtyVar = Boolean.parseBoolean(request.getParameter("dirtyVar"));
if (!dirtyVar) {
session.removeAttribute(Constants.USER_ROW_ID_BEAN_MAP);
}
}
//method to preserve data on validation end here
String formName, prevPage = null, nextPage = null;
boolean roleStatus = false;
if (pageOf.equals(Constants.PAGEOF_APPROVE_USER)) {
Long identifier = (Long) request
.getAttribute(Constants.PREVIOUS_PAGE);
request.setAttribute("prevPageId", identifier);
prevPage = Constants.USER_DETAILS_SHOW_ACTION + "?"
+ Constants.SYSTEM_IDENTIFIER + "=" + identifier;
identifier = (Long) request.getAttribute(Constants.NEXT_PAGE);
nextPage = Constants.USER_DETAILS_SHOW_ACTION + "?"
+ Constants.SYSTEM_IDENTIFIER + "=" + identifier;
request.setAttribute("nextPageId", identifier);
}
if (!pageOf.equals(Constants.PAGEOF_APPROVE_USER)) {
if (operation.equals(Constants.EDIT)
&& (userForm.getCsmUserId() != null)) {
if (userForm.getCsmUserId().longValue() == 0) {
UserBizLogic bizLogic = (UserBizLogic) BizLogicFactory
.getInstance().getBizLogic(Constants.USER_FORM_ID);
String sourceObjName = User.class.getName();
String[] selectColName = { "csmUserId" };
String[] whereColName = { "id" };
String[] whereColCond = { "=" };
Object[] whereColVal = { userForm.getId() };
List regList = bizLogic.retrieve(sourceObjName,
selectColName, whereColName, whereColCond,
whereColVal, Constants.AND_JOIN_CONDITION);
if (regList != null && !regList.isEmpty()) {
Long id = (Long) regList.get(0);
userForm.setCsmUserId(id);
}
}
}
}
if (operation.equals(Constants.EDIT)) {
if (!pageOf.equals(Constants.PAGEOF_APPROVE_USER)) {
setUserPrivileges(request.getSession(), userForm.getId());
}
if (pageOf.equals(Constants.PAGEOF_APPROVE_USER)) {
formName = Constants.APPROVE_USER_EDIT_ACTION;
} else if (pageOf.equals(Constants.PAGEOF_USER_PROFILE)) {
formName = Constants.USER_EDIT_PROFILE_ACTION;
} else {
formName = Constants.USER_EDIT_ACTION;
}
} else {
if (pageOf.equals(Constants.PAGEOF_APPROVE_USER)) {
formName = Constants.APPROVE_USER_ADD_ACTION;
} else {
formName = Constants.USER_ADD_ACTION;
if (pageOf.equals(Constants.PAGEOF_SIGNUP)) {
formName = Constants.SIGNUP_USER_ADD_ACTION;
}
}
}
if (pageOf.equals(Constants.PAGEOF_APPROVE_USER)
&& (userForm.getStatus().equals(
Constants.APPROVE_USER_PENDING_STATUS)
|| userForm.getStatus().equals(
Constants.APPROVE_USER_REJECT_STATUS) || userForm
.getStatus().equals(Constants.SELECT_OPTION))) {
roleStatus = true;
if (userForm.getStatus().equals(
Constants.APPROVE_USER_PENDING_STATUS)) {
operation = Constants.EDIT;
}
}
if (pageOf.equals(Constants.PAGEOF_USER_PROFILE)) {
roleStatus = true;
}
if (operation.equalsIgnoreCase(Constants.ADD)) {
// request.getSession(true).setAttribute(Constants.USER_ROW_ID_BEAN_MAP,
// null);
if (userForm.getCountry() == null) {
userForm.setCountry((String) DefaultValueManager
.getDefaultValue(Constants.DEFAULT_COUNTRY));
}
}
if (pageOf.equals(Constants.PAGEOF_SIGNUP)) {
userForm.setStatus(Constants.ACTIVITY_STATUS_NEW);
userForm.setActivityStatus(Constants.ACTIVITY_STATUS_NEW);
}
userForm.setOperation(operation);
userForm.setPageOf(pageOf);
userForm.setSubmittedFor(submittedFor);
userForm.setRedirectTo(reqPath);
String roleStatusforJSP = roleStatus + "";
request.setAttribute("roleStatus", roleStatusforJSP);
request.setAttribute("formName", formName);
request.setAttribute("prevPageURL", prevPage);
request.setAttribute("nextPageURL", nextPage);
// Sets the countryList attribute to be used in the Add/Edit User Page.
List countryList = CDEManager.getCDEManager().getPermissibleValueList(
Constants.CDE_NAME_COUNTRY_LIST, null);
request.setAttribute("countryList", countryList);
// Sets the stateList attribute to be used in the Add/Edit User Page.
List stateList = CDEManager.getCDEManager().getPermissibleValueList(
Constants.CDE_NAME_STATE_LIST, null);
request.setAttribute("stateList", stateList);
// Sets the pageOf attribute (for Add,Edit or Query Interface).
String target = pageOf;
IBizLogic bizLogic = BizLogicFactory.getInstance().getBizLogic(
Constants.USER_FORM_ID);
// Sets the instituteList attribute to be used in the Add/Edit User
// Page.
String sourceObjectName = Institution.class.getName();
String[] displayNameFields = { Constants.NAME };
String valueField = Constants.SYSTEM_IDENTIFIER;
List instituteList = bizLogic.getList(sourceObjectName,
displayNameFields, valueField, false);
request.setAttribute("instituteList", instituteList);
// Sets the departmentList attribute to be used in the Add/Edit User
// Page.
sourceObjectName = Department.class.getName();
List departmentList = bizLogic.getList(sourceObjectName,
displayNameFields, valueField, false);
request.setAttribute("departmentList", departmentList);
// Sets the cancerResearchGroupList attribute to be used in the Add/Edit
// User Page.
sourceObjectName = CancerResearchGroup.class.getName();
List cancerResearchGroupList = bizLogic.getList(sourceObjectName,
displayNameFields, valueField, false);
request
.setAttribute("cancerResearchGroupList",
cancerResearchGroupList);
// Populate the activity status dropdown if the operation is edit
// and the user page is of administrative tab.
if (operation.equals(Constants.EDIT)
&& pageOf.equals(Constants.PAGEOF_USER_ADMIN)) {
request.setAttribute("activityStatusList",
Constants.USER_ACTIVITY_STATUS_VALUES);
}
// Populate the role dropdown if the page is of approve user or
// (Add/Edit) user page of the administrative tab.
// if (pageOf.equals(Constants.PAGEOF_APPROVE_USER) ||
// pageOf.equals(Constants.PAGEOF_USER_ADMIN)
// ||pageOf.equals(Constants.PAGEOF_USER_PROFILE ))
// List roleNameValueBeanList = getRoles();
// request.setAttribute("roleList", roleNameValueBeanList);
// Populate the status dropdown for approve user
// page.(Approve,Reject,Pending)
if (pageOf.equals(Constants.PAGEOF_APPROVE_USER)) {
request.setAttribute("statusList",
Constants.APPROVE_USER_STATUS_VALUES);
}
Logger.out.debug("pageOf :
// To show Role as Scientist
HibernateDAO dao = (HibernateDAO) DAOFactory.getInstance().getDAO(Constants.HIBERNATE_DAO);
try {
dao.openSession(sessionDataBean);
List<User> userList = dao.retrieve(User.class.getName(), "emailAddress", userForm.getEmailAddress());
if (!userList.isEmpty()) {
User user = userList.get(0);
if (!user.getRoleId().equals(Constants.ADMIN_USER)) {
if (user.getSiteCollection().isEmpty()) {
userForm.setRole(Constants.NON_ADMIN_USER);
}
}
}
} finally {
dao.closeSession();
}
// For Privilege
String roleId = userForm.getRole();
boolean flagForSARole = false;
if((Constants.SUPER_ADMIN_USER).equals(roleId))
{
flagForSARole = true;
// To show empty summary in case User is Super Administrator
request.getSession(true).setAttribute(Constants.USER_ROW_ID_BEAN_MAP, null);
}
request.setAttribute("flagForSARole", flagForSARole);
msrUtil.onFirstTimeLoad(mapping, request);
final String cpOperation = (String) request.getParameter("cpOperation");
if (cpOperation != null) {
return msrUtil.setAJAXResponse(request, response, cpOperation);
}
// Parameters for JSP
int SELECT_OPTION_VALUE = Constants.SELECT_OPTION_VALUE;
boolean readOnlyEmail = false;
if (operation.equals(Constants.EDIT)
&& pageOf.equals(Constants.PAGEOF_USER_PROFILE)) {
readOnlyEmail = true;
}
request.setAttribute("SELECT_OPTION_VALUE", SELECT_OPTION_VALUE);
request.setAttribute("Approve", Constants.APPROVE_USER_APPROVE_STATUS);
request
.setAttribute("pageOfApproveUser",
Constants.PAGEOF_APPROVE_USER);
request.setAttribute("backPage", Constants.APPROVE_USER_SHOW_ACTION
+ "?" + Constants.PAGE_NUMBER + "=" + Constants.START_PAGE);
request.setAttribute("redirectTo", Constants.REQ_PATH);
request.setAttribute("addforJSP", Constants.ADD);
request.setAttribute("editforJSP", Constants.EDIT);
request.setAttribute("searchforJSP", Constants.SEARCH);
request.setAttribute("readOnlyEmail", readOnlyEmail);
request
.setAttribute("pageOfUserProfile",
Constants.PAGEOF_USER_PROFILE);
request.setAttribute("pageOfUserAdmin", Constants.PAGEOF_USER_ADMIN);
request.setAttribute("pageOfSignUp", Constants.PAGEOF_SIGNUP);
request.setAttribute("pageOf", pageOf);
request.setAttribute("operation", operation);
request.setAttribute("openInCPFrame", openInCPFrame);
Logger.out.debug("USerAction redirect :
if(openInCPFrame != null && Constants.TRUE.equalsIgnoreCase(openInCPFrame))
target=Constants.OPEN_PAGE_IN_CPFRAME;
return mapping.findForward(target);
}
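/**
 * A short description of the helper below (added for clarity): refreshes the
 * privilege cache for the user with the given id (when the user is not an
 * administrator) and stores the resulting site/role/privilege map in the HTTP
 * session under Constants.USER_ROW_ID_BEAN_MAP. Does nothing when id is 0.
 */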
private void setUserPrivileges(HttpSession session, long id)
{
if (id == 0)
{
return;
}
try
{
IBizLogic bizLogic = BizLogicFactory.getInstance().getBizLogic(Constants.USER_FORM_ID);
User user = (User)bizLogic.retrieve(User.class.getName(), id);
String role = user.getRoleId();
if (role != null && !role.equalsIgnoreCase(Constants.ADMIN_USER))
{
PrivilegeManager privilegeManager = PrivilegeManager.getInstance();
// privilegeManager.removePrivilegeCache(user.getLoginName());
PrivilegeCache privilegeCache = privilegeManager.getPrivilegeCache(user.getLoginName());
privilegeCache.refresh();
Map<String, SiteUserRolePrivilegeBean> privilegeMap = CaTissuePrivilegeUtility.getAllPrivileges(privilegeCache);
session.setAttribute(Constants.USER_ROW_ID_BEAN_MAP, privilegeMap);
}
}
catch (DAOException e)
{
Logger.out.error(e.getMessage(), e);
}
catch (Exception e)
{
Logger.out.error(e.getMessage(), e);
}
}
/*
* (non-Javadoc)
*
* @see edu.wustl.catissuecore.action.SecureAction#isAuthorizedToExecute(javax.servlet.http.HttpServletRequest)
*/
protected boolean isAuthorizedToExecute(HttpServletRequest request)
throws Exception {
String pageOf = request.getParameter(Constants.PAGEOF);
if (Constants.PAGEOF_USER_ADMIN.equals(pageOf)) {
return super.isAuthorizedToExecute(request);
}
return true;
}
/*
* (non-Javadoc)
*
* @see edu.wustl.catissuecore.action.BaseAction#getSessionData(javax.servlet.http.HttpServletRequest)
*/
protected SessionDataBean getSessionData(HttpServletRequest request) {
String pageOf = request.getParameter(Constants.PAGEOF);
if (Constants.PAGEOF_USER_ADMIN.equals(pageOf)) {
return super.getSessionData(request);
}
return new SessionDataBean();
}
}
|
package org.batfish.main;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.batfish.common.BfConsts;
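/**
 * Command-line settings for the batfish executable. Options are declared and
 * parsed with Apache Commons CLI, and unspecified options fall back to the
 * DEFAULT_* constants defined below.
 *
 * A minimal usage sketch (argument values are illustrative only, and the
 * ParseException declared by the constructor must be handled by the caller):
 * <pre>
 *   Settings settings = new Settings(args);
 *   if (settings.canExecute()) {
 *      // proceed using the parsed settings
 *   }
 * </pre>
 */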
public class Settings {
private static final String ARG_ACCEPT_NODE = "acceptnode";
private static final String ARG_ANONYMIZE = "anonymize";
private static final String ARG_AUTO_BASE_DIR = "autobasedir";
private static final String ARG_BLACK_HOLE = "blackhole";
private static final String ARG_BLACK_HOLE_PATH = "blackholepath";
private static final String ARG_BLACKLIST_DST_IP_PATH = "blacklistdstippath";
private static final String ARG_BLACKLIST_INTERFACE = "blint";
private static final String ARG_BLACKLIST_NODE = "blnode";
private static final String ARG_BUILD_PREDICATE_INFO = "bpi";
private static final String ARG_CB_HOST = "lbhost";
private static final String ARG_CB_PORT = "lbport";
private static final String ARG_COMPILE = "compile";
private static final String ARG_CONC_UNIQUE = "concunique";
private static final String ARG_COUNT = "count";
private static final String ARG_DATA_PLANE = "dp";
private static final String ARG_DATA_PLANE_DIR = "dpdir";
private static final String ARG_DISABLE_Z3_SIMPLIFICATION = "nosimplify";
private static final String ARG_DUMP_CONTROL_PLANE_FACTS = "dumpcp";
private static final String ARG_DUMP_FACTS_DIR = "dumpdir";
private static final String ARG_DUMP_IF = "dumpif";
private static final String ARG_DUMP_IF_DIR = "dumpifdir";
private static final String ARG_DUMP_INTERFACE_DESCRIPTIONS = "id";
private static final String ARG_DUMP_INTERFACE_DESCRIPTIONS_PATH = "idpath";
private static final String ARG_DUMP_TRAFFIC_FACTS = "dumptraffic";
private static final String ARG_DUPLICATE_ROLE_FLOWS = "drf";
private static final String ARG_EXIT_ON_PARSE_ERROR = "ee";
private static final String ARG_FACTS = "facts";
private static final String ARG_FLATTEN = "flatten";
private static final String ARG_FLATTEN_DESTINATION = "flattendst";
private static final String ARG_FLATTEN_ON_THE_FLY = "flattenonthefly";
private static final String ARG_FLATTEN_SOURCE = "flattensrc";
private static final String ARG_FLOW_PATH = "flowpath";
private static final String ARG_FLOW_SINK_PATH = "flowsink";
private static final String ARG_FLOWS = "flow";
private static final String ARG_GEN_OSPF = "genospf";
private static final String ARG_GENERATE_STUBS = "gs";
private static final String ARG_GENERATE_STUBS_INPUT_ROLE = "gsinputrole";
private static final String ARG_GENERATE_STUBS_INTERFACE_DESCRIPTION_REGEX = "gsidregex";
private static final String ARG_GENERATE_STUBS_REMOTE_AS = "gsremoteas";
private static final String ARG_GUI = "gui";
private static final String ARG_HELP = "help";
private static final String ARG_HISTOGRAM = "histogram";
private static final String ARG_IGNORE_UNSUPPORTED = "ignoreunsupported";
private static final String ARG_INTERFACE_MAP_PATH = "impath";
private static final String ARG_LB_WEB_ADMIN_PORT = "lbwebadminport";
private static final String ARG_LB_WEB_PORT = "lbwebport";
private static final String ARG_LOG_FILE = "logfile";
private static final String ARG_LOG_LEVEL = "loglevel";
private static final String ARG_LOGICDIR = "logicdir";
private static final String ARG_MPI = "mpi";
private static final String ARG_MPI_PATH = "mpipath";
private static final String ARG_NO_TRAFFIC = "notraffic";
private static final String ARG_NODE_ROLES_PATH = "nrpath";
private static final String ARG_NODE_SET_PATH = "nodes";
private static final String ARG_PEDANTIC_AS_ERROR = "pedanticerror";
private static final String ARG_PEDANTIC_SUPPRESS = "pedanticsuppress";
private static final String ARG_PREDHELP = "predhelp";
private static final String ARG_PREDICATES = "predicates";
private static final String ARG_PRINT_PARSE_TREES = "ppt";
private static final String ARG_QUERY = "query";
private static final String ARG_QUERY_ALL = "all";
private static final String ARG_REACH = "reach";
private static final String ARG_REACH_PATH = "reachpath";
private static final String ARG_RED_FLAG_AS_ERROR = "redflagerror";
private static final String ARG_RED_FLAG_SUPPRESS = "redflagsuppress";
private static final String ARG_REDIRECT_STDERR = "redirect";
private static final String ARG_REMOVE_FACTS = "remove";
private static final String ARG_REVERT = "revert";
private static final String ARG_ROLE_HEADERS = "rh";
private static final String ARG_ROLE_NODES_PATH = "rnpath";
private static final String ARG_ROLE_REACHABILITY_QUERY = "rr";
private static final String ARG_ROLE_REACHABILITY_QUERY_PATH = "rrpath";
private static final String ARG_ROLE_SET_PATH = "rspath";
private static final String ARG_ROLE_TRANSIT_QUERY = "rt";
private static final String ARG_ROLE_TRANSIT_QUERY_PATH = "rtpath";
private static final String ARG_SERIALIZE_INDEPENDENT = "si";
private static final String ARG_SERIALIZE_INDEPENDENT_PATH = "sipath";
private static final String ARG_SERIALIZE_TO_TEXT = "stext";
private static final String ARG_SERIALIZE_VENDOR = "sv";
private static final String ARG_SERIALIZE_VENDOR_PATH = "svpath";
private static final String ARG_SERVICE_MODE = "servicemode";
private static final String ARG_SERVICE_PORT = "serviceport";
private static final String ARG_SERVICE_URL = "serviceurl";
private static final String ARG_TEST_RIG_PATH = "testrig";
private static final String ARG_THROW_ON_LEXER_ERROR = "throwlexer";
private static final String ARG_THROW_ON_PARSER_ERROR = "throwparser";
private static final String ARG_TIMESTAMP = "timestamp";
private static final String ARG_UNIMPLEMENTED_AS_ERROR = "unimplementederror";
private static final String ARG_UNIMPLEMENTED_SUPPRESS = "unimplementedsuppress";
private static final String ARG_UPDATE = "update";
private static final String ARG_VAR_SIZE_MAP_PATH = "vsmpath";
private static final String ARG_WORKSPACE = "workspace";
private static final String ARG_Z3 = "z3";
private static final String ARG_Z3_CONCRETIZE = "conc";
private static final String ARG_Z3_CONCRETIZER_INPUT_FILES = "concin";
private static final String ARG_Z3_CONCRETIZER_NEGATED_INPUT_FILES = "concinneg";
private static final String ARG_Z3_CONCRETIZER_OUTPUT_FILE = "concout";
private static final String ARG_Z3_OUTPUT = "z3path";
private static final String ARGNAME_ACCEPT_NODE = "node";
private static final String ARGNAME_ANONYMIZE = "path";
private static final String ARGNAME_AUTO_BASE_DIR = "path";
private static final String ARGNAME_BLACK_HOLE_PATH = "path";
private static final String ARGNAME_BLACKLIST_DST_IP = "ip";
private static final String ARGNAME_BLACKLIST_INTERFACE = "node,interface";
private static final String ARGNAME_BLACKLIST_NODE = "node";
private static final String ARGNAME_BUILD_PREDICATE_INFO = "path";
private static final String ARGNAME_DATA_PLANE_DIR = "path";
private static final String ARGNAME_DUMP_FACTS_DIR = "path";
private static final String ARGNAME_DUMP_IF_DIR = "path";
private static final String ARGNAME_DUMP_INTERFACE_DESCRIPTIONS_PATH = "path";
private static final String ARGNAME_FLATTEN_DESTINATION = "path";
private static final String ARGNAME_FLATTEN_SOURCE = "path";
private static final String ARGNAME_FLOW_PATH = "path";
private static final String ARGNAME_FLOW_SINK_PATH = "path";
private static final String ARGNAME_GEN_OSPF = "path";
private static final String ARGNAME_GENERATE_STUBS_INPUT_ROLE = "role";
private static final String ARGNAME_GENERATE_STUBS_INTERFACE_DESCRIPTION_REGEX = "java-regex";
private static final String ARGNAME_GENERATE_STUBS_REMOTE_AS = "as";
private static final String ARGNAME_INTERFACE_MAP_PATH = "path";
private static final String ARGNAME_LB_WEB_ADMIN_PORT = "port";
private static final String ARGNAME_LB_WEB_PORT = "port";
private static final String ARGNAME_LOG_FILE = "path";
private static final String ARGNAME_LOG_LEVEL = "level";
private static final String ARGNAME_LOGICDIR = "path";
private static final String ARGNAME_MPI_PATH = "path";
private static final String ARGNAME_NODE_ROLES_PATH = "path";
private static final String ARGNAME_NODE_SET_PATH = "path";
private static final String ARGNAME_REACH_PATH = "path";
private static final String ARGNAME_REVERT = "branch-name";
private static final String ARGNAME_ROLE_NODES_PATH = "path";
private static final String ARGNAME_ROLE_REACHABILITY_QUERY_PATH = "path";
private static final String ARGNAME_ROLE_SET_PATH = "path";
private static final String ARGNAME_ROLE_TRANSIT_QUERY_PATH = "path";
private static final String ARGNAME_SERIALIZE_INDEPENDENT_PATH = "path";
private static final String ARGNAME_SERIALIZE_VENDOR_PATH = "path";
private static final String ARGNAME_VAR_SIZE_MAP_PATH = "path";
private static final String ARGNAME_Z3_CONCRETIZER_INPUT_FILES = "paths";
private static final String ARGNAME_Z3_CONCRETIZER_NEGATED_INPUT_FILES = "paths";
private static final String ARGNAME_Z3_CONCRETIZER_OUTPUT_FILE = "path";
private static final String ARGNAME_Z3_OUTPUT = "path";
public static final String DEFAULT_CONNECTBLOX_ADMIN_PORT = "55181";
public static final String DEFAULT_CONNECTBLOX_HOST = "localhost";
public static final String DEFAULT_CONNECTBLOX_REGULAR_PORT = "55179";
private static final String DEFAULT_DATA_PLANE_DIR = "dp";
private static final String DEFAULT_DUMP_FACTS_DIR = "facts";
private static final String DEFAULT_DUMP_IF_DIR = "if";
private static final String DEFAULT_DUMP_INTERFACE_DESCRIPTIONS_PATH = "interface_descriptions";
private static final String DEFAULT_FLOW_PATH = "flows";
private static final String DEFAULT_LB_WEB_ADMIN_PORT = "55183";
private static final String DEFAULT_LB_WEB_PORT = "8080";
private static final String DEFAULT_LOG_LEVEL = "debug";
private static final List<String> DEFAULT_PREDICATES = Collections
.singletonList("InstalledRoute");
private static final String DEFAULT_SERIALIZE_INDEPENDENT_PATH = "serialized-independent-configs";
private static final String DEFAULT_SERIALIZE_VENDOR_PATH = "serialized-vendor-configs";
private static final String DEFAULT_SERVICE_PORT = BfConsts.SVC_PORT
.toString();
private static final String DEFAULT_SERVICE_URL = "http://localhost";
private static final String DEFAULT_TEST_RIG_PATH = "default_test_rig";
private static final String DEFAULT_Z3_OUTPUT = "z3-dataplane-output.smt2";
private static final boolean DEFAULT_Z3_SIMPLIFY = true;
private static final String EXECUTABLE_NAME = "batfish";
private String _acceptNode;
private boolean _anonymize;
private String _anonymizeDir;
private String _autoBaseDir;
private boolean _blackHole;
private String _blackHolePath;
private String _blacklistDstIpPath;
private String _blacklistInterface;
private String _blacklistNode;
private boolean _buildPredicateInfo;
private boolean _canExecute;
private String _cbHost;
private int _cbPort;
private boolean _compile;
private boolean _concretize;
private String[] _concretizerInputFilePaths;
private String _concretizerOutputFilePath;
private boolean _concUnique;
private boolean _counts;
private boolean _dataPlane;
private String _dataPlaneDir;
private boolean _dumpControlPlaneFacts;
private String _dumpFactsDir;
private boolean _dumpIF;
private String _dumpIFDir;
private boolean _dumpInterfaceDescriptions;
private String _dumpInterfaceDescriptionsPath;
private boolean _dumpTrafficFacts;
private boolean _duplicateRoleFlows;
private boolean _exitOnParseError;
private boolean _facts;
private boolean _flatten;
private String _flattenDestination;
private boolean _flattenOnTheFly;
private String _flattenSource;
private String _flowPath;
private boolean _flows;
private String _flowSinkPath;
private boolean _generateStubs;
private String _generateStubsInputRole;
private String _generateStubsInterfaceDescriptionRegex;
private Integer _generateStubsRemoteAs;
private boolean _genMultipath;
private String _genOspfTopology;
private List<String> _helpPredicates;
private boolean _histogram;
private String _hsaInputDir;
private String _hsaOutputDir;
private boolean _ignoreUnsupported;
private String _interfaceMapPath;
private int _lbWebAdminPort;
private int _lbWebPort;
private String _logFile;
private BatfishLogger _logger;
private String _logicDir;
private String _logicSrcDir;
private String _logLevel;
private String _mpiPath;
private String[] _negatedConcretizerInputFilePaths;
private String _nodeRolesPath;
private String _nodeSetPath;
private boolean _noTraffic;
private Options _options;
private boolean _pedanticAsError;
private boolean _pedanticRecord;
private List<String> _predicates;
private boolean _printParseTree;
private boolean _printSemantics;
private boolean _query;
private boolean _queryAll;
private boolean _reach;
private String _reachPath;
private boolean _redFlagAsError;
private boolean _redFlagRecord;
private boolean _redirectStdErr;
private boolean _removeFacts;
private boolean _revert;
private String _revertBranchName;
private boolean _roleHeaders;
private String _roleNodesPath;
private boolean _roleReachabilityQuery;
private String _roleReachabilityQueryPath;
private String _roleSetPath;
private boolean _roleTransitQuery;
private String _roleTransitQueryPath;
private boolean _runInServiceMode;
private boolean _serializeIndependent;
private String _serializeIndependentPath;
private boolean _serializeToText;
private boolean _serializeVendor;
private String _serializeVendorPath;
private int _servicePort;
private String _serviceUrl;
private boolean _simplify;
private String _testRigPath;
private boolean _throwOnLexerError;
private boolean _throwOnParserError;
private boolean _timestamp;
private boolean _unimplementedAsError;
private boolean _unimplementedRecord;
private boolean _update;
private String _varSizeMapPath;
private String _workspaceName;
private boolean _z3;
private String _z3File;
public Settings() throws ParseException {
this(new String[] {});
}
public Settings(String[] args) throws ParseException {
initOptions();
parseCommandLine(args);
}
public boolean canExecute() {
return _canExecute;
}
public boolean concretizeUnique() {
return _concUnique;
}
public boolean createWorkspace() {
return _compile;
}
public boolean dumpInterfaceDescriptions() {
return _dumpInterfaceDescriptions;
}
public boolean duplicateRoleFlows() {
return _duplicateRoleFlows;
}
public boolean exitOnParseError() {
return _exitOnParseError;
}
public boolean flattenOnTheFly() {
return _flattenOnTheFly;
}
public String getAcceptNode() {
return _acceptNode;
}
public boolean getAnonymize() {
return _anonymize;
}
public String getAnonymizeDir() {
return _anonymizeDir;
}
public String getAutoBaseDir() {
return _autoBaseDir;
}
public String getBlackHoleQueryPath() {
return _blackHolePath;
}
public String getBlacklistDstIpPath() {
return _blacklistDstIpPath;
}
public String getBlacklistInterfaceString() {
return _blacklistInterface;
}
public String getBlacklistNode() {
return _blacklistNode;
}
public String getBranchName() {
return _revertBranchName;
}
public boolean getBuildPredicateInfo() {
return _buildPredicateInfo;
}
public boolean getConcretize() {
return _concretize;
}
public String[] getConcretizerInputFilePaths() {
return _concretizerInputFilePaths;
}
public String getConcretizerOutputFilePath() {
return _concretizerOutputFilePath;
}
public String getConnectBloxHost() {
return _cbHost;
}
public int getConnectBloxPort() {
return _cbPort;
}
public boolean getCountsOnly() {
return _counts;
}
public boolean getDataPlane() {
return _dataPlane;
}
public String getDataPlaneDir() {
return _dataPlaneDir;
}
public boolean getDumpControlPlaneFacts() {
return _dumpControlPlaneFacts;
}
public String getDumpFactsDir() {
return _dumpFactsDir;
}
public String getDumpIFDir() {
return _dumpIFDir;
}
public String getDumpInterfaceDescriptionsPath() {
return _dumpInterfaceDescriptionsPath;
}
public boolean getDumpTrafficFacts() {
return _dumpTrafficFacts;
}
public boolean getFacts() {
return _facts;
}
public boolean getFlatten() {
return _flatten;
}
public String getFlattenDestination() {
return _flattenDestination;
}
public String getFlattenSource() {
return _flattenSource;
}
public String getFlowPath() {
return _flowPath;
}
public boolean getFlows() {
return _flows;
}
public String getFlowSinkPath() {
return _flowSinkPath;
}
public boolean getGenerateMultipathInconsistencyQuery() {
return _genMultipath;
}
public String getGenerateOspfTopologyPath() {
return _genOspfTopology;
}
public boolean getGenerateStubs() {
return _generateStubs;
}
public String getGenerateStubsInputRole() {
return _generateStubsInputRole;
}
public String getGenerateStubsInterfaceDescriptionRegex() {
return _generateStubsInterfaceDescriptionRegex;
}
public int getGenerateStubsRemoteAs() {
return _generateStubsRemoteAs;
}
public List<String> getHelpPredicates() {
return _helpPredicates;
}
public boolean getHistogram() {
return _histogram;
}
public String getHSAInputPath() {
return _hsaInputDir;
}
public String getHSAOutputPath() {
return _hsaOutputDir;
}
public boolean getInterfaceFailureInconsistencyBlackHoleQuery() {
return _blackHole;
}
public boolean getInterfaceFailureInconsistencyReachableQuery() {
return _reach;
}
public String getInterfaceMapPath() {
return _interfaceMapPath;
}
public int getLbWebAdminPort() {
return _lbWebAdminPort;
}
public int getLbWebPort() {
return _lbWebPort;
}
public String getLogFile() {
return _logFile;
}
public BatfishLogger getLogger() {
return _logger;
}
public String getLogicDir() {
return _logicDir;
}
public String getLogicSrcDir() {
return _logicSrcDir;
}
public String getLogLevel() {
return _logLevel;
}
public String getMultipathInconsistencyQueryPath() {
return _mpiPath;
}
public String[] getNegatedConcretizerInputFilePaths() {
return _negatedConcretizerInputFilePaths;
}
public String getNodeRolesPath() {
return _nodeRolesPath;
}
public String getNodeSetPath() {
return _nodeSetPath;
}
public boolean getNoTraffic() {
return _noTraffic;
}
public boolean getPedanticAsError() {
return _pedanticAsError;
}
public boolean getPedanticRecord() {
return _pedanticRecord;
}
public List<String> getPredicates() {
return _predicates;
}
public boolean getPrintSemantics() {
return _printSemantics;
}
public boolean getQuery() {
return _query;
}
public boolean getQueryAll() {
return _queryAll;
}
public String getReachableQueryPath() {
return _reachPath;
}
public boolean getRedFlagAsError() {
return _redFlagAsError;
}
public boolean getRedFlagRecord() {
return _redFlagRecord;
}
public boolean getRemoveFacts() {
return _removeFacts;
}
public boolean getRoleHeaders() {
return _roleHeaders;
}
public String getRoleNodesPath() {
return _roleNodesPath;
}
public boolean getRoleReachabilityQuery() {
return _roleReachabilityQuery;
}
public String getRoleReachabilityQueryPath() {
return _roleReachabilityQueryPath;
}
public String getRoleSetPath() {
return _roleSetPath;
}
public boolean getRoleTransitQuery() {
return _roleTransitQuery;
}
public String getRoleTransitQueryPath() {
return _roleTransitQueryPath;
}
public boolean getSerializeIndependent() {
return _serializeIndependent;
}
public String getSerializeIndependentPath() {
return _serializeIndependentPath;
}
public boolean getSerializeToText() {
return _serializeToText;
}
public boolean getSerializeVendor() {
return _serializeVendor;
}
public String getSerializeVendorPath() {
return _serializeVendorPath;
}
public int getServicePort() {
return _servicePort;
}
public String getServiceUrl() {
return _serviceUrl;
}
public boolean getSimplify() {
return _simplify;
}
public String getTestRigPath() {
return _testRigPath;
}
public boolean getThrowOnLexerError() {
return _throwOnLexerError;
}
public boolean getThrowOnParserError() {
return _throwOnParserError;
}
public boolean getTimestamp() {
return _timestamp;
}
public boolean getUnimplementedAsError() {
return _unimplementedAsError;
}
public boolean getUnimplementedRecord() {
return _unimplementedRecord;
}
public boolean getUpdate() {
return _update;
}
public String getVarSizeMapPath() {
return _varSizeMapPath;
}
public String getWorkspaceName() {
return _workspaceName;
}
public boolean getZ3() {
return _z3;
}
public String getZ3File() {
return _z3File;
}
public boolean ignoreUnsupported() {
return _ignoreUnsupported;
}
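/**
 * Brief note on the method below (added for clarity): declares every supported
 * command-line option on the Apache Commons CLI Options instance that
 * parseCommandLine later reads from.
 */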
private void initOptions() {
_options = new Options();
_options.addOption(Option
.builder()
.argName("predicates")
.hasArgs()
.desc("list of LogicBlox predicates to query (defaults to '"
+ DEFAULT_PREDICATES.get(0) + "')").longOpt(ARG_PREDICATES)
.build());
_options.addOption(Option
.builder()
.argName("path")
.hasArg()
.desc("path to test rig directory (defaults to \""
+ DEFAULT_TEST_RIG_PATH + "\")").longOpt(ARG_TEST_RIG_PATH)
.build());
_options
.addOption(Option.builder().argName("name").hasArg()
.desc("name of LogicBlox workspace").longOpt(ARG_WORKSPACE)
.build());
_options.addOption(Option.builder().argName("hostname").hasArg()
.desc("hostname of ConnectBlox server for regular session")
.longOpt(ARG_CB_HOST).build());
_options.addOption(Option.builder().argName("port_number").hasArg()
.desc("port of ConnectBlox server for regular session")
.longOpt(ARG_CB_PORT).build());
_options.addOption(Option.builder().argName(ARGNAME_LB_WEB_PORT).hasArg()
.desc("port of lb-web server").longOpt(ARG_LB_WEB_PORT).build());
_options.addOption(Option.builder().argName(ARGNAME_LB_WEB_ADMIN_PORT)
.hasArg().desc("admin port lb-web server")
.longOpt(ARG_LB_WEB_ADMIN_PORT).build());
_options
.addOption(Option
.builder()
.argName("predicates")
.optionalArg(true)
.hasArgs()
.desc("print semantics for all predicates, or for predicates supplied as optional arguments")
.longOpt(ARG_PREDHELP).build());
_options.addOption(Option.builder().desc("print this message")
.longOpt(ARG_HELP).build());
_options.addOption(Option.builder().desc("query workspace")
.longOpt(ARG_QUERY).build());
_options.addOption(Option.builder()
.desc("return predicate cardinalities instead of contents")
.longOpt(ARG_COUNT).build());
_options.addOption(Option.builder().desc("query ALL predicates")
.longOpt(ARG_QUERY_ALL).build());
_options.addOption(Option.builder()
.desc("create workspace and add project logic")
.longOpt(ARG_COMPILE).build());
_options.addOption(Option.builder().desc("add facts to workspace")
.longOpt(ARG_FACTS).build());
_options.addOption(Option.builder()
.desc("remove facts instead of adding them")
.longOpt(ARG_REMOVE_FACTS).build());
_options.addOption(Option.builder().desc("display results in GUI")
.longOpt(ARG_GUI).build());
_options.addOption(Option.builder()
.desc("differentially update test rig workspace")
.longOpt(ARG_UPDATE).build());
_options.addOption(Option.builder()
.desc("do not add injected traffic facts").longOpt(ARG_NO_TRAFFIC)
.build());
_options
.addOption(Option
.builder()
.desc("exit on first parse error (otherwise will exit on last parse error)")
.longOpt(ARG_EXIT_ON_PARSE_ERROR).build());
_options.addOption(Option.builder().desc("generate z3 data plane logic")
.longOpt(ARG_Z3).build());
_options.addOption(Option.builder().argName(ARGNAME_Z3_OUTPUT).hasArg()
.desc("set z3 data plane logic output file").longOpt(ARG_Z3_OUTPUT)
.build());
_options.addOption(Option.builder()
.argName(ARGNAME_Z3_CONCRETIZER_INPUT_FILES).hasArgs()
.desc("set z3 concretizer input file(s)")
.longOpt(ARG_Z3_CONCRETIZER_INPUT_FILES).build());
_options.addOption(Option.builder()
.argName(ARGNAME_Z3_CONCRETIZER_NEGATED_INPUT_FILES).hasArgs()
.desc("set z3 negated concretizer input file(s)")
.longOpt(ARG_Z3_CONCRETIZER_NEGATED_INPUT_FILES).build());
_options.addOption(Option.builder()
.argName(ARGNAME_Z3_CONCRETIZER_OUTPUT_FILE).hasArg()
.desc("set z3 concretizer output file")
.longOpt(ARG_Z3_CONCRETIZER_OUTPUT_FILE).build());
_options.addOption(Option.builder()
.desc("create z3 logic to concretize data plane constraints")
.longOpt(ARG_Z3_CONCRETIZE).build());
_options.addOption(Option.builder()
.desc("push concrete flows into logicblox databse")
.longOpt(ARG_FLOWS).build());
_options.addOption(Option.builder().argName(ARGNAME_FLOW_PATH).hasArg()
.desc("path to concrete flows").longOpt(ARG_FLOW_PATH).build());
_options.addOption(Option.builder().argName(ARGNAME_FLOW_SINK_PATH)
.hasArg().desc("path to flow sinks").longOpt(ARG_FLOW_SINK_PATH)
.build());
_options.addOption(Option.builder()
.desc("dump intermediate format of configurations")
.longOpt(ARG_DUMP_IF).build());
_options.addOption(Option.builder().argName(ARGNAME_DUMP_IF_DIR).hasArg()
.desc("directory to dump intermediate format files")
.longOpt(ARG_DUMP_IF_DIR).build());
_options.addOption(Option.builder().desc("dump control plane facts")
.longOpt(ARG_DUMP_CONTROL_PLANE_FACTS).build());
_options.addOption(Option.builder().desc("dump traffic facts")
.longOpt(ARG_DUMP_TRAFFIC_FACTS).build());
_options.addOption(Option.builder().hasArg()
.argName(ARGNAME_DUMP_FACTS_DIR)
.desc("directory to dump LogicBlox facts")
.longOpt(ARG_DUMP_FACTS_DIR).build());
_options.addOption(Option.builder().hasArg().argName(ARGNAME_REVERT)
.desc("revert test rig workspace to specified branch")
.longOpt(ARG_REVERT).build());
_options.addOption(Option.builder().desc("redirect stderr to stdout")
.longOpt(ARG_REDIRECT_STDERR).build());
_options.addOption(Option.builder().hasArg().argName(ARGNAME_ANONYMIZE)
.desc("created anonymized versions of configs in test rig")
.longOpt(ARG_ANONYMIZE).build());
_options
.addOption(Option
.builder()
.hasArg()
.argName(ARGNAME_LOGICDIR)
.desc("set logic dir with respect to filesystem of machine running LogicBlox")
.longOpt(ARG_LOGICDIR).build());
_options.addOption(Option.builder().desc("disable z3 simplification")
.longOpt(ARG_DISABLE_Z3_SIMPLIFICATION).build());
_options.addOption(Option.builder().desc("serialize vendor configs")
.longOpt(ARG_SERIALIZE_VENDOR).build());
_options.addOption(Option.builder().hasArg()
.argName(ARGNAME_SERIALIZE_VENDOR_PATH)
.desc("path to read or write serialized vendor configs")
.longOpt(ARG_SERIALIZE_VENDOR_PATH).build());
_options.addOption(Option.builder()
.desc("serialize vendor-independent configs")
.longOpt(ARG_SERIALIZE_INDEPENDENT).build());
_options
.addOption(Option
.builder()
.hasArg()
.argName(ARGNAME_SERIALIZE_INDEPENDENT_PATH)
.desc("path to read or write serialized vendor-independent configs")
.longOpt(ARG_SERIALIZE_INDEPENDENT_PATH).build());
_options.addOption(Option.builder()
.desc("compute and serialize data plane (requires logicblox)")
.longOpt(ARG_DATA_PLANE).build());
_options.addOption(Option.builder().hasArg()
.argName(ARGNAME_DATA_PLANE_DIR)
.desc("path to read or write serialized data plane")
.longOpt(ARG_DATA_PLANE_DIR).build());
_options.addOption(Option.builder().desc("print parse trees")
.longOpt(ARG_PRINT_PARSE_TREES).build());
_options.addOption(Option.builder().desc("dump interface descriptions")
.longOpt(ARG_DUMP_INTERFACE_DESCRIPTIONS).build());
_options.addOption(Option.builder().hasArg()
.argName(ARGNAME_DUMP_INTERFACE_DESCRIPTIONS_PATH)
.desc("path to read or write interface descriptions")
.longOpt(ARG_DUMP_INTERFACE_DESCRIPTIONS_PATH).build());
_options.addOption(Option.builder().hasArg()
.argName(ARGNAME_NODE_SET_PATH)
.desc("path to read or write node set").longOpt(ARG_NODE_SET_PATH)
.build());
_options.addOption(Option.builder().hasArg()
.argName(ARGNAME_INTERFACE_MAP_PATH)
.desc("path to read or write interface-number mappings")
.longOpt(ARG_INTERFACE_MAP_PATH).build());
_options.addOption(Option.builder().hasArg()
.argName(ARGNAME_VAR_SIZE_MAP_PATH)
.desc("path to read or write var-size mappings")
.longOpt(ARG_VAR_SIZE_MAP_PATH).build());
_options.addOption(Option.builder()
.desc("generate multipath-inconsistency query").longOpt(ARG_MPI)
.build());
_options.addOption(Option.builder().hasArg().argName(ARGNAME_MPI_PATH)
.desc("path to read or write multipath-inconsistency query")
.longOpt(ARG_MPI_PATH).build());
_options.addOption(Option.builder().desc("serialize to text")
.longOpt(ARG_SERIALIZE_TO_TEXT).build());
_options.addOption(Option.builder().desc("run in service mode")
.longOpt(ARG_SERVICE_MODE).build());
_options
.addOption(Option.builder().argName("port_number").hasArg()
.desc("port for batfish service").longOpt(ARG_SERVICE_PORT)
.build());
_options.addOption(Option.builder().argName("base_url").hasArg()
.desc("base url for batfish service").longOpt(ARG_SERVICE_URL)
.build());
_options
.addOption(Option
.builder()
.hasArg()
.argName(ARGNAME_BUILD_PREDICATE_INFO)
.desc("build predicate info (should only be called by ant build script) with provided input logic dir")
.longOpt(ARG_BUILD_PREDICATE_INFO).build());
_options.addOption(Option.builder().hasArg()
.argName(ARGNAME_BLACKLIST_INTERFACE)
.desc("interface to blacklist (force inactive) during analysis")
.longOpt(ARG_BLACKLIST_INTERFACE).build());
_options
.addOption(Option
.builder()
.hasArg()
.argName(ARGNAME_BLACKLIST_NODE)
.desc("node to blacklist (remove from configuration structures) during analysis")
.longOpt(ARG_BLACKLIST_NODE).build());
_options.addOption(Option.builder().hasArg().argName(ARGNAME_ACCEPT_NODE)
.desc("accept node for reachability query")
.longOpt(ARG_ACCEPT_NODE).build());
_options
.addOption(Option
.builder()
.desc("generate interface-failure-inconsistency reachable packet query")
.longOpt(ARG_REACH).build());
_options
.addOption(Option
.builder()
.hasArg()
.argName(ARGNAME_REACH_PATH)
.desc("path to read or write interface-failure-inconsistency reachable packet query")
.longOpt(ARG_REACH_PATH).build());
_options
.addOption(Option
.builder()
.desc("generate interface-failure-inconsistency black-hole packet query")
.longOpt(ARG_BLACK_HOLE).build());
_options
.addOption(Option
.builder()
.desc("only concretize single packet (do not break up disjunctions)")
.longOpt(ARG_CONC_UNIQUE).build());
_options
.addOption(Option
.builder()
.hasArg()
.argName(ARGNAME_BLACK_HOLE_PATH)
.desc("path to read or write interface-failure-inconsistency black-hole packet query")
.longOpt(ARG_BLACK_HOLE_PATH).build());
_options.addOption(Option.builder().hasArg()
.argName(ARGNAME_BLACKLIST_DST_IP)
.desc("destination ip to blacklist for concretizer queries")
.longOpt(ARG_BLACKLIST_DST_IP_PATH).build());
_options.addOption(Option.builder().hasArg()
.argName(ARGNAME_NODE_ROLES_PATH)
.desc("path to read or write node-role mappings")
.longOpt(ARG_NODE_ROLES_PATH).build());
_options.addOption(Option.builder().hasArg()
.argName(ARGNAME_ROLE_NODES_PATH)
.desc("path to read or write role-node mappings")
.longOpt(ARG_ROLE_NODES_PATH).build());
_options.addOption(Option.builder().hasArg()
.argName(ARGNAME_ROLE_REACHABILITY_QUERY_PATH)
.desc("path to read or write role-reachability queries")
.longOpt(ARG_ROLE_REACHABILITY_QUERY_PATH).build());
_options.addOption(Option.builder()
.desc("generate role-reachability queries")
.longOpt(ARG_ROLE_REACHABILITY_QUERY).build());
_options.addOption(Option.builder().hasArg()
.argName(ARGNAME_ROLE_TRANSIT_QUERY_PATH)
.desc("path to read or write role-transit queries")
.longOpt(ARG_ROLE_TRANSIT_QUERY_PATH).build());
_options.addOption(Option.builder().desc("generate role-transit queries")
.longOpt(ARG_ROLE_TRANSIT_QUERY).build());
_options.addOption(Option.builder().hasArg()
.argName(ARGNAME_ROLE_SET_PATH)
.desc("path to read or write role set").longOpt(ARG_ROLE_SET_PATH)
.build());
_options.addOption(Option.builder()
.desc("duplicate flows across all nodes in same role")
.longOpt(ARG_DUPLICATE_ROLE_FLOWS).build());
_options.addOption(Option.builder().hasArg().argName(ARGNAME_LOG_LEVEL)
.desc("log level").longOpt(ARG_LOG_LEVEL).build());
_options.addOption(Option.builder()
.desc("header of concretized z3 output refers to role, not node")
.longOpt(ARG_ROLE_HEADERS).build());
_options.addOption(Option.builder()
.desc("throw exception immediately on parser error")
.longOpt(ARG_THROW_ON_PARSER_ERROR).build());
_options.addOption(Option.builder()
.desc("throw exception immediately on lexer error")
.longOpt(ARG_THROW_ON_LEXER_ERROR).build());
_options.addOption(Option.builder()
.desc("flatten hierarchical juniper configuration files")
.longOpt(ARG_FLATTEN).build());
_options
.addOption(Option
.builder()
.hasArg()
.argName(ARGNAME_FLATTEN_SOURCE)
.desc("path to test rig containing hierarchical juniper configurations to be flattened")
.longOpt(ARG_FLATTEN_SOURCE).build());
_options
.addOption(Option
.builder()
.hasArg()
.argName(ARGNAME_FLATTEN_DESTINATION)
.desc("output path to test rig in which flat juniper (and all other) configurations will be placed")
.longOpt(ARG_FLATTEN_DESTINATION).build());
_options
.addOption(Option
.builder()
.desc("flatten hierarchical juniper configuration files on-the-fly (line number references will be spurious)")
.longOpt(ARG_FLATTEN_ON_THE_FLY).build());
_options
.addOption(Option
.builder()
.desc("throws "
+ PedanticBatfishException.class.getSimpleName()
+ " for likely harmless warnings (e.g. deviation from good configuration style), instead of emitting warning and continuing")
.longOpt(ARG_PEDANTIC_AS_ERROR).build());
_options.addOption(Option.builder().desc("suppresses pedantic warnings")
.longOpt(ARG_PEDANTIC_SUPPRESS).build());
_options
.addOption(Option
.builder()
.desc("throws "
+ RedFlagBatfishException.class.getSimpleName()
+ " on some recoverable errors (e.g. bad config lines), instead of emitting warning and attempting to recover")
.longOpt(ARG_RED_FLAG_AS_ERROR).build());
_options.addOption(Option.builder().desc("suppresses red-flag warnings")
.longOpt(ARG_RED_FLAG_SUPPRESS).build());
_options
.addOption(Option
.builder()
.desc("throws "
+ UnimplementedBatfishException.class.getSimpleName()
+ " when encountering unimplemented configuration directives, instead of emitting warning and ignoring")
.longOpt(ARG_UNIMPLEMENTED_AS_ERROR).build());
_options.addOption(Option.builder()
.desc("suppresses unimplemented-configuration-directive warnings")
.longOpt(ARG_UNIMPLEMENTED_SUPPRESS).build());
_options.addOption(Option.builder()
.desc("build histogram of unimplemented features")
.longOpt(ARG_HISTOGRAM).build());
_options.addOption(Option.builder().desc("generate stubs")
.longOpt(ARG_GENERATE_STUBS).build());
_options.addOption(Option.builder().hasArg()
.argName(ARGNAME_GENERATE_STUBS_INPUT_ROLE)
.desc("input role for which to generate stubs")
.longOpt(ARG_GENERATE_STUBS_INPUT_ROLE).build());
_options
.addOption(Option
.builder()
.hasArg()
.argName(ARGNAME_GENERATE_STUBS_INTERFACE_DESCRIPTION_REGEX)
.desc("java regex to extract hostname of generated stub from description of adjacent interface")
.longOpt(ARG_GENERATE_STUBS_INTERFACE_DESCRIPTION_REGEX)
.build());
_options.addOption(Option.builder().hasArg()
.argName(ARGNAME_GENERATE_STUBS_REMOTE_AS)
.desc("autonomous system number of stubs to be generated")
.longOpt(ARG_GENERATE_STUBS_REMOTE_AS).build());
_options.addOption(Option.builder().hasArg().argName(ARGNAME_LOG_FILE)
.desc("path to main log file").longOpt(ARG_LOG_FILE).build());
_options.addOption(Option.builder().hasArg().argName(ARGNAME_GEN_OSPF)
.desc("generate ospf configs from specified topology")
.longOpt(ARG_GEN_OSPF).build());
_options.addOption(Option.builder()
.desc("print timestamps in log messages").longOpt(ARG_TIMESTAMP)
.build());
_options
.addOption(Option
.builder()
.desc("ignore configuration files with unsupported format instead of crashing")
.longOpt(ARG_IGNORE_UNSUPPORTED).build());
_options.addOption(Option.builder().hasArg()
.argName(ARGNAME_AUTO_BASE_DIR)
.desc("path to base dir for automatic i/o path selection")
.longOpt(ARG_AUTO_BASE_DIR).build());
}
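/**
 * Brief note on the method below (added for clarity): parses the supplied
 * command-line arguments into the corresponding fields, applying the DEFAULT_*
 * values where an option is not given. When the help option is present, usage
 * is printed and execution is disabled via _canExecute.
 */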
private void parseCommandLine(String[] args) throws ParseException {
_canExecute = true;
_runInServiceMode = false;
_printSemantics = false;
CommandLineParser parser = new DefaultParser();
// parse the command line arguments
CommandLine line = parser.parse(_options, args);
_logLevel = line.getOptionValue(ARG_LOG_LEVEL, DEFAULT_LOG_LEVEL);
_logFile = line.getOptionValue(ARG_LOG_FILE);
if (line.hasOption(ARG_HELP)) {
_canExecute = false;
// automatically generate the help statement
HelpFormatter formatter = new HelpFormatter();
formatter.setLongOptPrefix("-");
formatter.printHelp(EXECUTABLE_NAME, _options);
return;
}
_runInServiceMode = line.hasOption(ARG_SERVICE_MODE);
_servicePort = Integer.parseInt(line.getOptionValue(ARG_SERVICE_PORT,
DEFAULT_SERVICE_PORT));
_serviceUrl = line.getOptionValue(ARG_SERVICE_URL, DEFAULT_SERVICE_URL);
_counts = line.hasOption(ARG_COUNT);
_queryAll = line.hasOption(ARG_QUERY_ALL);
_query = line.hasOption(ARG_QUERY);
if (line.hasOption(ARG_PREDHELP)) {
_printSemantics = true;
String[] optionValues = line.getOptionValues(ARG_PREDHELP);
if (optionValues != null) {
_helpPredicates = Arrays.asList(optionValues);
}
}
_cbHost = line.getOptionValue(ARG_CB_HOST, DEFAULT_CONNECTBLOX_HOST);
_cbPort = Integer.parseInt(line.getOptionValue(ARG_CB_PORT,
DEFAULT_CONNECTBLOX_REGULAR_PORT));
_testRigPath = line.getOptionValue(ARG_TEST_RIG_PATH,
DEFAULT_TEST_RIG_PATH);
_workspaceName = line.getOptionValue(ARG_WORKSPACE, null);
if (line.hasOption(ARG_PREDICATES)) {
_predicates = Arrays.asList(line.getOptionValues(ARG_PREDICATES));
}
else {
_predicates = DEFAULT_PREDICATES;
}
_removeFacts = line.hasOption(ARG_REMOVE_FACTS);
_compile = line.hasOption(ARG_COMPILE);
_facts = line.hasOption(ARG_FACTS);
_update = line.hasOption(ARG_UPDATE);
_noTraffic = line.hasOption(ARG_NO_TRAFFIC);
_exitOnParseError = line.hasOption(ARG_EXIT_ON_PARSE_ERROR);
_z3 = line.hasOption(ARG_Z3);
if (_z3) {
_z3File = line.getOptionValue(ARG_Z3_OUTPUT, DEFAULT_Z3_OUTPUT);
}
_concretize = line.hasOption(ARG_Z3_CONCRETIZE);
if (_concretize) {
_concretizerInputFilePaths = line
.getOptionValues(ARG_Z3_CONCRETIZER_INPUT_FILES);
_negatedConcretizerInputFilePaths = line
.getOptionValues(ARG_Z3_CONCRETIZER_NEGATED_INPUT_FILES);
_concretizerOutputFilePath = line
.getOptionValue(ARG_Z3_CONCRETIZER_OUTPUT_FILE);
}
_flows = line.hasOption(ARG_FLOWS);
if (_flows) {
_flowPath = line.getOptionValue(ARG_FLOW_PATH, DEFAULT_FLOW_PATH);
}
_flowSinkPath = line.getOptionValue(ARG_FLOW_SINK_PATH);
_dumpIF = line.hasOption(ARG_DUMP_IF);
if (_dumpIF) {
_dumpIFDir = line.getOptionValue(ARG_DUMP_IF_DIR, DEFAULT_DUMP_IF_DIR);
}
_dumpControlPlaneFacts = line.hasOption(ARG_DUMP_CONTROL_PLANE_FACTS);
_dumpTrafficFacts = line.hasOption(ARG_DUMP_TRAFFIC_FACTS);
_dumpFactsDir = line.getOptionValue(ARG_DUMP_FACTS_DIR,
DEFAULT_DUMP_FACTS_DIR);
_revertBranchName = line.getOptionValue(ARG_REVERT);
_revert = (_revertBranchName != null);
_redirectStdErr = line.hasOption(ARG_REDIRECT_STDERR);
_anonymize = line.hasOption(ARG_ANONYMIZE);
if (_anonymize) {
_anonymizeDir = line.getOptionValue(ARG_ANONYMIZE);
}
_logicDir = line.getOptionValue(ARG_LOGICDIR, null);
_simplify = DEFAULT_Z3_SIMPLIFY;
if (line.hasOption(ARG_DISABLE_Z3_SIMPLIFICATION)) {
_simplify = false;
}
_serializeVendor = line.hasOption(ARG_SERIALIZE_VENDOR);
_serializeVendorPath = line.getOptionValue(ARG_SERIALIZE_VENDOR_PATH,
DEFAULT_SERIALIZE_VENDOR_PATH);
_serializeIndependent = line.hasOption(ARG_SERIALIZE_INDEPENDENT);
_serializeIndependentPath = line.getOptionValue(
ARG_SERIALIZE_INDEPENDENT_PATH, DEFAULT_SERIALIZE_INDEPENDENT_PATH);
_dataPlane = line.hasOption(ARG_DATA_PLANE);
_dataPlaneDir = line.getOptionValue(ARG_DATA_PLANE_DIR,
DEFAULT_DATA_PLANE_DIR);
_printParseTree = line.hasOption(ARG_PRINT_PARSE_TREES);
_dumpInterfaceDescriptions = line
.hasOption(ARG_DUMP_INTERFACE_DESCRIPTIONS);
_dumpInterfaceDescriptionsPath = line.getOptionValue(
ARG_DUMP_INTERFACE_DESCRIPTIONS_PATH,
DEFAULT_DUMP_INTERFACE_DESCRIPTIONS_PATH);
_nodeSetPath = line.getOptionValue(ARG_NODE_SET_PATH);
_interfaceMapPath = line.getOptionValue(ARG_INTERFACE_MAP_PATH);
_varSizeMapPath = line.getOptionValue(ARG_VAR_SIZE_MAP_PATH);
_genMultipath = line.hasOption(ARG_MPI);
_mpiPath = line.getOptionValue(ARG_MPI_PATH);
_serializeToText = line.hasOption(ARG_SERIALIZE_TO_TEXT);
_lbWebPort = Integer.parseInt(line.getOptionValue(ARG_LB_WEB_PORT,
DEFAULT_LB_WEB_PORT));
_lbWebAdminPort = Integer.parseInt(line.getOptionValue(
ARG_LB_WEB_ADMIN_PORT, DEFAULT_LB_WEB_ADMIN_PORT));
_buildPredicateInfo = line.hasOption(ARG_BUILD_PREDICATE_INFO);
if (_buildPredicateInfo) {
_logicSrcDir = line.getOptionValue(ARG_BUILD_PREDICATE_INFO);
}
_blacklistInterface = line.getOptionValue(ARG_BLACKLIST_INTERFACE);
_blacklistNode = line.getOptionValue(ARG_BLACKLIST_NODE);
_reach = line.hasOption(ARG_REACH);
_reachPath = line.getOptionValue(ARG_REACH_PATH);
_blackHole = line.hasOption(ARG_BLACK_HOLE);
_blackHolePath = line.getOptionValue(ARG_BLACK_HOLE_PATH);
_blacklistDstIpPath = line.getOptionValue(ARG_BLACKLIST_DST_IP_PATH);
_concUnique = line.hasOption(ARG_CONC_UNIQUE);
_acceptNode = line.getOptionValue(ARG_ACCEPT_NODE);
_nodeRolesPath = line.getOptionValue(ARG_NODE_ROLES_PATH);
_roleNodesPath = line.getOptionValue(ARG_ROLE_NODES_PATH);
_roleReachabilityQueryPath = line
.getOptionValue(ARG_ROLE_REACHABILITY_QUERY_PATH);
_roleReachabilityQuery = line.hasOption(ARG_ROLE_REACHABILITY_QUERY);
_roleTransitQueryPath = line.getOptionValue(ARG_ROLE_TRANSIT_QUERY_PATH);
_roleTransitQuery = line.hasOption(ARG_ROLE_TRANSIT_QUERY);
_roleSetPath = line.getOptionValue(ARG_ROLE_SET_PATH);
_duplicateRoleFlows = line.hasOption(ARG_DUPLICATE_ROLE_FLOWS);
_roleHeaders = line.hasOption(ARG_ROLE_HEADERS);
_throwOnParserError = line.hasOption(ARG_THROW_ON_PARSER_ERROR);
_throwOnLexerError = line.hasOption(ARG_THROW_ON_LEXER_ERROR);
_flatten = line.hasOption(ARG_FLATTEN);
_flattenSource = line.getOptionValue(ARG_FLATTEN_SOURCE);
_flattenDestination = line.getOptionValue(ARG_FLATTEN_DESTINATION);
_flattenOnTheFly = line.hasOption(ARG_FLATTEN_ON_THE_FLY);
_pedanticAsError = line.hasOption(ARG_PEDANTIC_AS_ERROR);
_pedanticRecord = !line.hasOption(ARG_PEDANTIC_SUPPRESS);
_redFlagAsError = line.hasOption(ARG_RED_FLAG_AS_ERROR);
_redFlagRecord = !line.hasOption(ARG_RED_FLAG_SUPPRESS);
_unimplementedAsError = line.hasOption(ARG_UNIMPLEMENTED_AS_ERROR);
_unimplementedRecord = !line.hasOption(ARG_UNIMPLEMENTED_SUPPRESS);
_histogram = line.hasOption(ARG_HISTOGRAM);
_generateStubs = line.hasOption(ARG_GENERATE_STUBS);
_generateStubsInputRole = line
.getOptionValue(ARG_GENERATE_STUBS_INPUT_ROLE);
_generateStubsInterfaceDescriptionRegex = line
.getOptionValue(ARG_GENERATE_STUBS_INTERFACE_DESCRIPTION_REGEX);
if (line.hasOption(ARG_GENERATE_STUBS_REMOTE_AS)) {
_generateStubsRemoteAs = Integer.parseInt(line
.getOptionValue(ARG_GENERATE_STUBS_REMOTE_AS));
}
_genOspfTopology = line.getOptionValue(ARG_GEN_OSPF);
_timestamp = line.hasOption(ARG_TIMESTAMP);
_ignoreUnsupported = line.hasOption(ARG_IGNORE_UNSUPPORTED);
_autoBaseDir = line.getOptionValue(ARG_AUTO_BASE_DIR);
}
public boolean printParseTree() {
return _printParseTree;
}
public boolean redirectStdErr() {
return _redirectStdErr;
}
public boolean revert() {
return _revert;
}
public boolean runInServiceMode() {
return _runInServiceMode;
}
public void setDumpFactsDir(String path) {
_dumpFactsDir = path;
}
public void setLogger(BatfishLogger logger) {
_logger = logger;
}
public void setSerializeIndependentPath(String path) {
_serializeIndependentPath = path;
}
public void setSerializeVendorPath(String path) {
_serializeVendorPath = path;
}
public void setTestRigPath(String path) {
_testRigPath = path;
}
}
|
package uk.co.plogic.gwt.lib.dom;
import java.util.ArrayList;
import java.util.HashMap;
import uk.co.plogic.gwt.lib.events.MouseClickEvent;
import uk.co.plogic.gwt.lib.events.MouseOutEvent;
import uk.co.plogic.gwt.lib.events.MouseOutEventHandler;
import uk.co.plogic.gwt.lib.events.MouseOverEvent;
import uk.co.plogic.gwt.lib.events.MouseOverEventHandler;
import com.google.gwt.dom.client.Element;
import com.google.gwt.event.shared.HandlerManager;
import com.google.gwt.user.client.DOM;
import com.google.gwt.user.client.Event;
import com.google.gwt.user.client.EventListener;
/**
*
* Setup the DOM to fire the {@link uk.co.plogic.gwt.lib.events.MouseOverEvent},
* {@link uk.co.plogic.gwt.lib.events.MouseOutEvent} and {@link uk.co.plogic.gwt.lib.events.MouseClickEvent}
* events when an element with the given class is hovered over. The activeClassName css class is added to
* elements when they are moused over.
*
* The element with the given class must also have the class suffixed with _<id>.
* e.g.
* <p class="mouse_over mouse_over_1">one</p>
*
* Having two classes makes styling easy as all elements in a group can be styled together
* and the active item can be highlighted.
*
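* A minimal wiring sketch (the eventBus variable, class names and ids here are
* illustrative only):
* <pre>
*   new AttachActiveElementsEvent(eventBus, "mouse_over", "active");
* </pre>
*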
*
* @author si
*
*/
public class AttachActiveElementsEvent {
private HashMap<String, ArrayList<Element>> mouseOverElements = new HashMap<String, ArrayList<Element>>();
private final String activeClassName;
/**
 * @param eventBus event bus on which MouseOverEvent, MouseOutEvent and MouseClickEvent are fired
 * @param className base CSS class identifying the elements to attach to
 * @param activeClassName CSS class added to the matching elements while they are moused over
 */
public AttachActiveElementsEvent(final HandlerManager eventBus, final String className,
final String activeClassName ) {
this.activeClassName = activeClassName;
eventBus.addHandler(MouseOverEvent.TYPE, new MouseOverEventHandler() {
@Override
public void onMouseOver(MouseOverEvent e) {
setElementActive(true, e.getMouseOver_id());
}
});
eventBus.addHandler(MouseOutEvent.TYPE, new MouseOutEventHandler() {
@Override
public void onMouseOut(MouseOutEvent e) {
setElementActive(false, e.getMouseOut_id());
}
});
DomParser domParser = new DomParser();
domParser.addHandler(new DomElementByClassNameFinder(className) {
@Override
public void onDomElementFound(Element element, String id) {
String mouseOverID = null;
// find additional class name, i.e. "1" in class="mouse_over mouse_over_1"
for( String aClass : element.getClassName().split(" ") ) {
if( aClass.startsWith(className+'_')) {
mouseOverID = aClass.substring(className.length()+1);
if( ! mouseOverElements.containsKey(mouseOverID) ) {
mouseOverElements.put(mouseOverID, new ArrayList<Element>());
}
mouseOverElements.get(mouseOverID).add(element);
break;
}
}
if( mouseOverID != null ) {
final String mouseOverID_f = mouseOverID;
Event.setEventListener(element, new EventListener() {
@Override
public void onBrowserEvent(Event event) {
switch (DOM.eventGetType(event)) {
case Event.ONMOUSEOVER:
eventBus.fireEvent(new MouseOverEvent(mouseOverID_f));
break;
case Event.ONMOUSEOUT:
eventBus.fireEvent(new MouseOutEvent(mouseOverID_f));
break;
case Event.ONCLICK:
eventBus.fireEvent(new MouseClickEvent(mouseOverID_f));
break;
}
}
});
Event.sinkEvents(element, Event.ONMOUSEOVER | Event.ONMOUSEOUT | Event.ONCLICK);
}
}
});
domParser.parseDom();
}
/**
 * Add or remove the "active" class on every element registered under the given mouseover id.
 * @param active true to add the class, false to remove it
 * @param mouseOverID id extracted from the element's suffixed class name
 */
protected void setElementActive(boolean active, String mouseOverID) {
if( mouseOverElements.containsKey(mouseOverID) ) {
for( Element ee : mouseOverElements.get(mouseOverID) ) {
if(active) ee.addClassName(activeClassName);
else ee.removeClassName(activeClassName);
}
}
}
}
|
package edu.wustl.catissuecore.query;
import edu.wustl.catissuecore.util.global.Constants;
/**
 * @author aarti_sharma
 *
 * Represents a table (object) and field pair used while building query conditions.
 */
public class DataElement
{
/**
* Table/object name
*/
private String table;
/**
* Field name
*/
private String field;
private String fieldType;
public DataElement()
{
}
/**
* Constructor
* @param table Table/object name
* @param field Field name
*/
public DataElement(String table, String field)
{
this.table = table;
this.field = field;
}
public DataElement(String table, String field, String fieldType)
{
this.table = table;
this.field = field;
this.fieldType = fieldType;
}
/**
* SQL string representation
 * @param tableSufix suffix appended to the table name to form its alias
* @return SQL string representation
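 * <p>
 * Illustrative example: for table "Participant", field "LAST_NAME" and tableSufix 1 the plain
 * (non-timestamp) case returns "Participant1.LAST_NAME " (note the trailing space).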
*/
public String toSQLString(int tableSufix)
{
String fieldName = table + tableSufix + "." + field+" ";
if ((fieldType != null) && (Constants.FIELD_TYPE_TIMESTAMP_TIME.equalsIgnoreCase(fieldType)))
{
fieldName = Constants.MYSQL_TIME_FORMAT_FUNCTION + "(" + fieldName + ",'" + Constants.MYSQL_TIME_PATTERN + "') ";
}
else if ((fieldType != null) && (Constants.FIELD_TYPE_TIMESTAMP_DATE.equalsIgnoreCase(fieldType)))
{
fieldName = Constants.MYSQL_DATE_FORMAT_FUNCTION + "(" + fieldName + ",'" + Constants.MYSQL_DATE_PATTERN + "') ";
}
return fieldName;
}
public String getColumnNameString(int tableSufix)
{
return table + tableSufix + "_" + field;
}
public boolean equals(Object obj)
{
if (obj instanceof DataElement) {
DataElement dataElement = (DataElement)obj;
if(!table.equals(dataElement.table))
return false;
if(!field.equals(dataElement.field))
return false;
return true;
}
else
return false;
}
public int hashCode()
{
// keep hashCode consistent with equals(), which compares table and field
int hash = (table == null) ? 0 : table.hashCode();
return 31 * hash + ((field == null) ? 0 : field.hashCode());
}
public String getField()
{
return field;
}
public void setField(String field)
{
this.field = field;
}
/**
* @return Returns the fieldType.
*/
public String getFieldType()
{
return fieldType;
}
/**
* @param fieldType The fieldType to set.
*/
public void setFieldType(String fieldType)
{
this.fieldType = fieldType;
}
public String getTable()
{
return table;
}
public void setTable(String table)
{
this.table = table;
}
}
|
package com.ctrip.hermes.rest.status;
public class Tge {
private String m_topic;
private String m_group;
private String m_endpoint;
public Tge(String topic, String group, String endpoint) {
this.m_topic = topic;
this.m_group = group;
this.m_endpoint = endpoint;
}
public String getEndpoint() {
return m_endpoint;
}
public String getGroup() {
return m_group;
}
public String getTopic() {
return m_topic;
}
public void setEndpoint(String m_endpoint) {
this.m_endpoint = m_endpoint;
}
public void setGroup(String m_group) {
this.m_group = m_group;
}
public void setTopic(String m_topic) {
this.m_topic = m_topic;
}
public int hashCode() {
// combine the same fields that equals() compares
int hash = m_topic.hashCode();
hash = 31 * hash + m_group.hashCode();
hash = 31 * hash + m_endpoint.hashCode();
return hash;
}
public boolean equals(Object rhs) {
if (rhs == null)
return false;
if (!(rhs instanceof Tge)) {
return false;
}
Tge rObj = (Tge) rhs;
return this.m_topic.equals(rObj.m_topic) && this.m_group.equals(rObj.m_group)
&& this.m_endpoint.equals(rObj.m_endpoint);
}
public String toString() {
return new StringBuilder().append(m_topic).append('-').append(m_group).append('-').append(m_endpoint).toString();
}
}
|
package edu.wustl.common.util;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.StringTokenizer;
import java.util.Vector;
import javax.swing.tree.DefaultMutableTreeNode;
import edu.wustl.catissuecore.actionForm.AdvanceSearchForm;
import edu.wustl.catissuecore.query.AdvancedConditionsNode;
import edu.wustl.catissuecore.query.Condition;
import edu.wustl.catissuecore.query.DataElement;
import edu.wustl.catissuecore.query.Operator;
import edu.wustl.catissuecore.util.global.Constants;
import edu.wustl.common.util.logger.Logger;
/**
* @author poornima_govindrao
*
 * ConditionMapParser parses a condition map into Condition objects and builds AdvancedConditionsNode instances for Advanced Search
*/
public class ConditionMapParser
{
private Map createMap(String []keys,String []values)
{
Map map = new HashMap();
for(int i=0;i<keys.length;i++)
map.put(keys[i],values[i]);
return map;
}
//Given a Map, parseCondition function creates list of conditions
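//Illustrative example (see main() below): the map
//{"Participant:LAST_NAME"="Part", "Operator:Participant:LAST_NAME"="LIKE"}
//yields one Condition (Participant.LAST_NAME LIKE "Part"); BETWEEN / NOT BETWEEN operators also
//read the "<alias>:<column>:HLIMIT" entry and are expanded into two conditions.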
public List parseCondition(Map conditionMap)
{
List conditionList=new ArrayList();
Iterator keyItr = conditionMap.keySet().iterator();
while(keyItr.hasNext())
{
DataElement dataElement = new DataElement();
String key = (String)keyItr.next();
//Check for operator keys, which are of the form Operator:TableAliasName:ColumnName
if(key.startsWith("Operator"))
{
StringTokenizer st = new StringTokenizer(key, ":");
String operator=(String)conditionMap.get(key);
if(!operator.equals(Constants.ANY))
{
String value=new String();
String value2=new String();
String operator1=new String();
String operator2=new String();
while(st.hasMoreTokens())
{
st.nextToken();
String aliasName = st.nextToken();
//Logger.out.debug("table name in condition obj"+aliasName);
dataElement.setTable(aliasName);
String columnName = st.nextToken();
dataElement.setField(columnName);
value = (String)conditionMap.get(aliasName+":"+columnName);
Logger.out.debug("value1 "+value);
//Create two different conditions in case of Between and Not Between operators.
if(operator.equals(Operator.NOT_BETWEEN))
{
operator1 = Operator.LESS_THAN_OR_EQUALS;
operator2 = Operator.GREATER_THAN_OR_EQUALS;
value2 = (String)conditionMap.get(aliasName+":"+columnName+":"+"HLIMIT");
}
if(operator.equals(Operator.BETWEEN))
{
operator1 = Operator.GREATER_THAN_OR_EQUALS;
operator2 = Operator.LESS_THAN_OR_EQUALS;
value2 = (String)conditionMap.get(aliasName+":"+columnName+":"+"HLIMIT");
}
Logger.out.debug("value2 "+value2);
}
//String operatorValue = Operator.getOperator(operator);
Condition condition = new Condition(dataElement,new Operator(operator),value);
if(operator.equals(Operator.NOT_BETWEEN))
{
condition = new Condition(dataElement,new Operator(operator2),value2);
Condition condition1 = new Condition(dataElement,new Operator(operator1),value);
conditionList.add(condition1);
}
if(operator.equals(Operator.BETWEEN))
{
condition = new Condition(dataElement,new Operator(operator2),value2);
Condition condition1 = new Condition(dataElement,new Operator(operator1),value);
conditionList.add(condition1);
}
conditionList.add(condition);
}
}
}
return conditionList;
}
//Given a list of conditions, creates an advancedConditionNode and adds it to the root.
public DefaultMutableTreeNode createAdvancedQueryObj(List list,DefaultMutableTreeNode root,String objectName,String selectedNode,Map advancedConditionNodesMap,Integer nodeId)
{
//Query query = QueryFactory.getInstance().newQuery(Query.ADVANCED_QUERY,Query.PARTICIPANT);
// String tableObject = condition.getDataElement().getTable();
Logger.out.debug("selectedNode"+selectedNode);
Vector objectConditions = new Vector(list);
// String prevTableObj;
Logger.out.debug("nodeId--"+nodeId);
//Condition for Add operation
if(nodeId == null)
{
AdvancedConditionsNode advancedConditionsNode = new AdvancedConditionsNode(objectName);
// Iterator itr = list.iterator();
// while(itr.hasNext())
// Condition condition = (Condition)itr.next();
// advancedConditionsNode.addConditionToNode(condition);
advancedConditionsNode.setObjectConditions(objectConditions);
DefaultMutableTreeNode child = new DefaultMutableTreeNode(advancedConditionsNode);
if(root.getChildCount()==0)
{
root.add(child);
}
else
{
int nodeCount=0;
if(selectedNode.equals(""))
addNode(root,0,nodeCount,child,objectName,advancedConditionNodesMap);
else
addNode(root,Integer.parseInt(selectedNode),nodeCount,child,objectName,advancedConditionNodesMap);
Logger.out.debug("root size"+root.getDepth());
}
}
//Else edit operation
else
{
DefaultMutableTreeNode node = (DefaultMutableTreeNode)advancedConditionNodesMap.get(nodeId);
AdvancedConditionsNode advancedConditionsNode = (AdvancedConditionsNode)node.getUserObject();
advancedConditionsNode.setObjectConditions(objectConditions);
}
/*if(delete)
{
//Map deleteNodeMap = new HashMap();
TraverseTree traverseTree = new TraverseTree();
DefaultMutableTreeNode node = traverseTree.getSelectedNode(root,nodeId);
AdvancedConditionsNode advNode1 = (AdvancedConditionsNode)node.getUserObject();
Vector conditions1 = advNode1.getObjectConditions();
Iterator itr1 = conditions1.iterator();
while(itr1.hasNext())
{
Condition condition1 = (Condition)itr1.next();
Logger.out.debug("Column Name: "+condition1.getDataElement().getField());
}
DefaultMutableTreeNode parent = (DefaultMutableTreeNode)node.getParent();
int position = parent.getIndex(node);
Logger.out.debug("position--"+position);
//parent.remove(position);
//Logger.out.debug("position using tree--"+root.getIndex(node));
}*/
//((AdvancedConditionsImpl)((AdvancedQuery)query).whereConditions).setWhereCondition(root);
//advQueryObj.setTableSet(fromTables);
//List dataList = query.execute();
//System.out.println("Data: "+list);
//String fileName = Variables.catissueHome+System.getProperty("file.separator")+"AdvancedQueryResult.csv";
//ExportReport exp = new ExportReport(fileName);
return root;
}
public static void main(String[] args) throws Exception
{
Map map = new HashMap();
map.put("EventName_1","CellSpecimenReviewEventParameters");
map.put("EventColumnName_1","CellSpecimenReviewEventParameters.IDENTIFIER.bigint");
map.put("EventColumnOperator_1","=");
map.put("EventColumnValue_1","1");
ConditionMapParser conditionParser = new ConditionMapParser();
String keys1[] = {"Participant:LAST_NAME","Participant:GENDER","Operator:Participant:LAST_NAME","Operator:Participant:GENDER"};
String values1[] = {"Part","Male","LIKE","EQUALS"};
String keys2[] = {"CollectionProtocolRegistration:INDENTIFIER","Operator:CollectionProtocolRegistration:INDENTIFIER"};
String values2[] = {"1","GREATER_THAN"};
String keys3[] = {"CollectionProtocolRegistration:INDENTIFIER","Operator:CollectionProtocolRegistration:INDENTIFIER"};
String values3[] = {"10","LESS_THAN"};
String keys4[] = {"SpecimenCollectionGroup:CLINICAL_STATUS","Operator:SpecimenCollectionGroup:CLINICAL_STATUS"};
String values4[] = {"Relapse","Equal"};
String keys5[] = {"Participant:LAST_NAME","Operator:Participant:LAST_NAME"};
String values5[] = {"A","LIKE"};
String keys6[] = {"Specimen:TYPE","Operator:Specimen:TYPE"};
String values6[] = {"cDNA","EQUAL"};
Map map1 = conditionParser.createMap( keys1, values1);
System.out.println("Map: "+map1);
Map map2 = conditionParser.createMap( keys2, values2);
Map map3 = conditionParser.createMap( keys3, values3);
Map map4 = conditionParser.createMap( keys4, values4);
Map map5 = conditionParser.createMap( keys5, values5);
Map map6 = conditionParser.createMap( keys6, values6);
//System.out.println(map);
List dataCollection1 = conditionParser.parseCondition(map1);
System.out.println("List: "+dataCollection1);
List dataCollection2 = conditionParser.parseCondition(map2);
List dataCollection3 = conditionParser.parseCondition(map3);
List dataCollection4 = conditionParser.parseCondition(map4);
List dataCollection5 = conditionParser.parseCondition(map5);
List dataCollection6 = conditionParser.parseCondition(map6);
DefaultMutableTreeNode root = new DefaultMutableTreeNode();
// root = conditionParser.createAdvancedQueryObj(dataCollection1,root,"Participant","");
// root = conditionParser.createAdvancedQueryObj(dataCollection2,root,"CollectionProtocolRegistration","");
// root = conditionParser.createAdvancedQueryObj(dataCollection3,root,"CollectionProtocolRegistration","");
// root = conditionParser.createAdvancedQueryObj(dataCollection4,root,"SpecimenCollectionGroup","");
// root = conditionParser.createAdvancedQueryObj(dataCollection5,root,"Participant","");
// root = conditionParser.createAdvancedQueryObj(dataCollection6,root,"Specimen","");
//conditionParser.traverseTree(root,null,null,false,0,null);
}
//Traverse root and display map contents.
//Add advancedConditionNode
private void addNode(DefaultMutableTreeNode tree,int selectedNode,int nodeCount,DefaultMutableTreeNode presentNode,String objectName,Map advancedConditionNodesMap)
{
//DefaultMutableTreeNode child = new DefaultMutableTreeNode();
DefaultMutableTreeNode parent = new DefaultMutableTreeNode();
DefaultMutableTreeNode selectedAdvNode = (DefaultMutableTreeNode) advancedConditionNodesMap.get(new Integer(selectedNode));
if(selectedNode==0)
{
selectedAdvNode = (DefaultMutableTreeNode) advancedConditionNodesMap.get(new Integer(0));
selectedAdvNode.add(presentNode);
}
else
{
parent = new DefaultMutableTreeNode();
selectedAdvNode = (DefaultMutableTreeNode) advancedConditionNodesMap.get(new Integer(selectedNode));
String presentObjectName =((AdvancedConditionsNode)selectedAdvNode.getUserObject()).getObjectName();
Logger.out.debug("selectedAdvNode's object name"+((AdvancedConditionsNode)selectedAdvNode.getUserObject()).getObjectName());
if(objectName.equals(presentObjectName))
{
parent =(DefaultMutableTreeNode) selectedAdvNode.getParent();
parent.add(presentNode);
}
else
selectedAdvNode.add(presentNode);
}
}
/**
* This function parses the event parameter map & returns it in a format
* suitable to parseCondition() function.
* @param eventMap A map of specimen event parameters that is to be parsed.
* @return Map the parsed map suitable for parseCondition().
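 * <p>
 * Illustrative example: the four entries EventName_1, EventColumnName_1="X.IDENTIFIER.bigint",
 * EventColumnValue_1="1" and EventColumnOperator_1="=" are rewritten to
 * {"X:IDENTIFIER"="1", "Operator:X:IDENTIFIER"="="}.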
*/
public static Map parseEventParameterMap(Map eventMap)
{
Map newMap = new HashMap();
if(eventMap != null)
{
int rows = eventMap.size() / 4;
//Constants for eventMap keys
String columnKeyConstant = "EventColumnName_";
String columnValConstant = "EventColumnValue_";
String operatorConstant = "EventColumnOperator_";
for(int i=1;i<=rows;i++)
{
//Preparing the eventMap keys
String columnKey = columnKeyConstant + i;
String columnValKey = columnValConstant + i;
String operatorKey = operatorConstant + i;
String columnKeyValue = (String)eventMap.get(columnKey);
StringTokenizer tokenizer = new StringTokenizer(columnKeyValue,".");
//Extracting alias name & column name
String aliasName = tokenizer.nextToken();
String columnName = tokenizer.nextToken();
//Extracting actual column value & operator value
String columnValue = (String)eventMap.get(columnValKey);
String operatorValue = (String)eventMap.get(operatorKey);
//Preparing keys for new map
String newValKey = aliasName + ":" + columnName;
String newOpKey = "Operator:" + aliasName + ":" + columnName;
//Setting values in new map
newMap.put(newValKey,columnValue);
newMap.put(newOpKey,operatorValue);
}
}
return newMap;
}
}
|
package org.sagebionetworks.web.unitclient.widget.entity;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyBoolean;
import static org.mockito.Matchers.anyMap;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.mockito.ArgumentCaptor;
import org.mockito.Mockito;
import org.sagebionetworks.repo.model.wiki.WikiPage;
import org.sagebionetworks.web.client.GWTWrapper;
import org.sagebionetworks.web.client.PortalGinInjector;
import org.sagebionetworks.web.client.SynapseClientAsync;
import org.sagebionetworks.web.client.SynapseJSNIUtils;
import org.sagebionetworks.web.client.cookie.CookieProvider;
import org.sagebionetworks.web.client.resources.ResourceLoader;
import org.sagebionetworks.web.client.resources.WebResource;
import org.sagebionetworks.web.client.utils.Callback;
import org.sagebionetworks.web.client.widget.WidgetRendererPresenter;
import org.sagebionetworks.web.client.widget.entity.ElementWrapper;
import org.sagebionetworks.web.client.widget.entity.MarkdownWidget;
import org.sagebionetworks.web.client.widget.entity.MarkdownWidgetView;
import org.sagebionetworks.web.client.widget.entity.controller.SynapseAlert;
import org.sagebionetworks.web.client.widget.entity.registration.WidgetRegistrar;
import org.sagebionetworks.web.shared.WidgetConstants;
import org.sagebionetworks.web.shared.WikiPageKey;
import org.sagebionetworks.web.test.helper.AsyncMockStubber;
import com.google.gwt.junit.GWTMockUtilities;
import com.google.gwt.user.client.rpc.AsyncCallback;
import com.google.gwt.user.client.ui.Widget;
public class MarkdownWidgetTest {
MarkdownWidget presenter;
CookieProvider mockCookies;
PortalGinInjector mockInjector;
GWTWrapper mockGwt;
SynapseClientAsync mockSynapseClient;
SynapseJSNIUtils mockSynapseJSNIUtils;
MarkdownWidgetView mockView;
WidgetRegistrar mockWidgetRegistrar;
ResourceLoader mockResourceLoader;
SynapseAlert mockSynAlert;
WikiPageKey mockWikiPageKey;
WikiPage mockWikiPage;
ElementWrapper mockElementWrapper;
WidgetRendererPresenter mockWidgetRendererPresenter;
String testMarkdown = "markdown";
String elementContentType = "image";
Exception caught = new Exception("test");
@Before
public void setup() {
mockSynapseClient = mock(SynapseClientAsync.class);
mockSynapseJSNIUtils = mock(SynapseJSNIUtils.class);
mockWidgetRegistrar = mock(WidgetRegistrar.class);
mockWidgetRendererPresenter = mock(WidgetRendererPresenter.class);
when(mockWidgetRegistrar.getWidgetRendererForWidgetDescriptor(any(WikiPageKey.class), anyString(), anyMap(), any(Callback.class), any(Long.class))).thenReturn(mockWidgetRendererPresenter);
mockView = mock(MarkdownWidgetView.class);
mockGwt = mock(GWTWrapper.class);
mockCookies = mock(CookieProvider.class);
mockInjector = mock(PortalGinInjector.class);
mockSynAlert = mock(SynapseAlert.class);
mockResourceLoader = mock(ResourceLoader.class);
mockWikiPageKey = mock(WikiPageKey.class);
mockWikiPage = mock(WikiPage.class);
when(mockWikiPage.getMarkdown()).thenReturn(testMarkdown);
mockElementWrapper = mock(ElementWrapper.class);
//the mockElement to be rendered will be an image
when(mockElementWrapper.getAttribute("widgetParams")).thenReturn(elementContentType);
presenter = new MarkdownWidget(mockSynapseClient, mockSynapseJSNIUtils, mockWidgetRegistrar, mockCookies, mockResourceLoader, mockGwt, mockInjector, mockView, mockSynAlert);
}
@Test
public void testConfigureSuccess() {
boolean isPreview = true;
String sampleHTML = "<h1>heading</h1><p>foo baz bar</p>";
AsyncMockStubber.callSuccessWith(sampleHTML).when(mockSynapseClient).markdown2Html(anyString(), anyBoolean(), anyBoolean(), anyString(), any(AsyncCallback.class));
//only the first getElementById call for each id prefix finds its target, so the lookup doesn't loop forever but can still be verified
when(mockView.getElementById(WidgetConstants.MARKDOWN_TABLE_ID_PREFIX + "0")).thenReturn(mockElementWrapper);
when(mockView.getElementById(WidgetConstants.DIV_ID_MATHJAX_PREFIX + "0" + "-preview")).thenReturn(mockElementWrapper);
when(mockView.getElementById(org.sagebionetworks.markdown.constants.WidgetConstants.DIV_ID_WIDGET_PREFIX + "0" + "-preview")).thenReturn(mockElementWrapper);
when(mockResourceLoader.isLoaded(any(WebResource.class))).thenReturn(true);
presenter.configure(testMarkdown, mockWikiPageKey, isPreview, null);
ArgumentCaptor<Callback> callbackCaptor = ArgumentCaptor.forClass(Callback.class);
verify(mockView).callbackWhenAttached(callbackCaptor.capture());
callbackCaptor.getValue().invoke();
verify(mockSynapseClient).markdown2Html(anyString(), Mockito.eq(isPreview), anyBoolean(), anyString(), any(AsyncCallback.class));
verify(mockView).setMarkdown(sampleHTML);
// getElementById is called three times (tablesorter, loadMath and loadWidgets),
// then another three times until it returns null
verify(mockView, Mockito.times(6)).getElementById(anyString());
//verify tablesorter applied
verify(mockSynapseJSNIUtils).tablesorter(anyString());
//verify loadMath
verify(mockSynapseJSNIUtils).processWithMathJax(mockElementWrapper.getElement());
//verify loadWidgets
verify(mockWidgetRegistrar).getWidgetContentType(elementContentType);
verify(mockWidgetRegistrar).getWidgetDescriptor(elementContentType);
verify(mockWidgetRegistrar).getWidgetRendererForWidgetDescriptor(Mockito.eq(mockWikiPageKey), anyString(), anyMap(), any(Callback.class), any(Long.class));
verify(mockView).addWidget(any(Widget.class), Mockito.eq(org.sagebionetworks.markdown.constants.WidgetConstants.DIV_ID_WIDGET_PREFIX + "0" + "-preview"));
}
@Test
public void testConfigureFailure() {
boolean isPreview = true;
AsyncMockStubber.callFailureWith(caught).when(mockSynapseClient).markdown2Html(anyString(), anyBoolean(), anyBoolean(), anyString(), any(AsyncCallback.class));
presenter.configure(testMarkdown, mockWikiPageKey, isPreview, null);
verify(mockSynAlert).handleException(caught);
}
@Test
public void testLoadMarkdownFromWikiPageSuccess() {
boolean isPreview = true;
String sampleHTML = "<h1>heading</h1><p>foo baz bar</p>";
AsyncMockStubber.callSuccessWith(mockWikiPage).when(mockSynapseClient).getV2WikiPageAsV1(any(WikiPageKey.class), any(AsyncCallback.class));
AsyncMockStubber.callSuccessWith(sampleHTML).when(mockSynapseClient).markdown2Html(anyString(), anyBoolean(), anyBoolean(), anyString(), any(AsyncCallback.class));
//only the first getElementById call for each id prefix finds its target, so the lookup doesn't loop forever but can still be verified
when(mockView.getElementById(WidgetConstants.MARKDOWN_TABLE_ID_PREFIX + "0")).thenReturn(mockElementWrapper);
when(mockView.getElementById(WidgetConstants.DIV_ID_MATHJAX_PREFIX + "0" + "-preview")).thenReturn(mockElementWrapper);
when(mockView.getElementById(org.sagebionetworks.markdown.constants.WidgetConstants.DIV_ID_WIDGET_PREFIX + "0" + "-preview")).thenReturn(mockElementWrapper);
when(mockResourceLoader.isLoaded(any(WebResource.class))).thenReturn(true);
presenter.loadMarkdownFromWikiPage(mockWikiPageKey, isPreview, true);
ArgumentCaptor<Callback> callbackCaptor = ArgumentCaptor.forClass(Callback.class);
verify(mockView).callbackWhenAttached(callbackCaptor.capture());
callbackCaptor.getValue().invoke();
verify(mockWikiPageKey).setWikiPageId(anyString());
verify(mockSynapseClient).markdown2Html(anyString(), Mockito.eq(isPreview), anyBoolean(), anyString(), any(AsyncCallback.class));
verify(mockView).setEmptyVisible(true);
verify(mockView).clearMarkdown();
verify(mockView).setEmptyVisible(false);
verify(mockView).setMarkdown(sampleHTML);
// getElementById is called three times (tablesorter, loadMath and loadWidgets),
// then another three times until it returns null
verify(mockView, Mockito.times(6)).getElementById(anyString());
//verify tablesorter applied
verify(mockSynapseJSNIUtils).tablesorter(anyString());
//verify loadMath
verify(mockSynapseJSNIUtils).processWithMathJax(mockElementWrapper.getElement());
//verify loadWidgets
verify(mockWidgetRegistrar).getWidgetContentType(elementContentType);
verify(mockWidgetRegistrar).getWidgetDescriptor(elementContentType);
verify(mockWidgetRegistrar).getWidgetRendererForWidgetDescriptor(any(WikiPageKey.class), anyString(), anyMap(), any(Callback.class), any(Long.class));
verify(mockView).addWidget(any(Widget.class), Mockito.eq(org.sagebionetworks.markdown.constants.WidgetConstants.DIV_ID_WIDGET_PREFIX + "0" + "-preview"));
}
@Test
public void testLoadMarkdownFromWikiEmpty() {
boolean isPreview = true;
String sampleHTML = "";
AsyncMockStubber.callSuccessWith(sampleHTML).when(mockSynapseClient).markdown2Html(anyString(), anyBoolean(), anyBoolean(), anyString(), any(AsyncCallback.class));
String markdown="input markdown that is transformed into empty html";
presenter.configure(markdown, mockWikiPageKey, isPreview, 1L);
ArgumentCaptor<Callback> callbackCaptor = ArgumentCaptor.forClass(Callback.class);
verify(mockView).callbackWhenAttached(callbackCaptor.capture());
callbackCaptor.getValue().invoke();
verify(mockSynapseClient).markdown2Html(anyString(), Mockito.eq(isPreview), anyBoolean(), anyString(), any(AsyncCallback.class));
verify(mockView).setEmptyVisible(true);
verify(mockView).clearMarkdown();
verify(mockView, Mockito.never()).setEmptyVisible(false);
}
@Test
public void testLoadMarkdownFromWikiPageFailure() {
boolean isPreview = true;
AsyncMockStubber.callFailureWith(caught).when(mockSynapseClient).getV2WikiPageAsV1(any(WikiPageKey.class), any(AsyncCallback.class));
presenter.loadMarkdownFromWikiPage(mockWikiPageKey, isPreview, false);
verify(mockSynAlert).showError(anyString());
}
}
|
package org.voovan.http.message;
import org.voovan.Global;
import org.voovan.http.message.packet.Cookie;
import org.voovan.http.message.packet.Part;
import org.voovan.http.server.context.WebContext;
import org.voovan.http.server.exception.HttpParserException;
import org.voovan.http.server.exception.RequestTooLarge;
import org.voovan.network.IoSession;
import org.voovan.tools.*;
import org.voovan.tools.buffer.ByteBufferChannel;
import org.voovan.tools.buffer.TByteBuffer;
import org.voovan.tools.hashwheeltimer.HashWheelTask;
import org.voovan.tools.security.THash;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.*;
import java.util.Map.Entry;
import java.util.concurrent.ConcurrentHashMap;
public class HttpParser {
private static final String PL_METHOD = "1";
private static final String PL_PATH = "2";
private static final String PL_PROTOCOL = "3";
private static final String PL_VERSION = "4";
private static final String PL_STATUS = "5";
private static final String PL_STATUS_CODE = "6";
private static final String PL_QUERY_STRING = "7";
private static final String HEADER_MARK = "8";
private static final String CACHE_FLAG = "9";
private static final String BODY_PARTS = "21";
private static final String BODY_VALUE = "22";
private static final String BODY_FILE = "23"; // must differ from BODY_VALUE so file parts can be distinguished
public static final String MULTIPART_FORM_DATA = "multipart/form-data";
public static final String UPLOAD_PATH = TFile.assemblyPath(TFile.getTemporaryPath(),"voovan", "webserver", "upload");
public static final String propertyLineRegex = ": ";
public static final String equalMapRegex = "([^ ;,]+=[^;,]+)";
public static FastThreadLocal<Map<String, Object>> THREAD_PACKET_MAP = FastThreadLocal.withInitial(()->new HashMap<String, Object>());
public static FastThreadLocal<Request> THREAD_REQUEST = FastThreadLocal.withInitial(()->new Request());
public static FastThreadLocal<Response> THREAD_RESPONSE = FastThreadLocal.withInitial(()->new Response());
private static FastThreadLocal<byte[]> THREAD_STRING_BUILDER = FastThreadLocal.withInitial(()->new byte[1024]);
private static ConcurrentHashMap<Long, Map<String, Object>> PACKET_MAP_CACHE = new ConcurrentHashMap<Long, Map<String, Object>>();
public static final int PARSER_TYPE_REQUEST = 0;
public static final int PARSER_TYPE_RESPONSE = 1;
static {
Global.getHashWheelTimer().addTask(new HashWheelTask() {
@Override
public void run() {
PACKET_MAP_CACHE.clear();
}
}, 60);
}
private HttpParser(){
}
// /**
//  * Parse a single HTTP header line into a name/value pair.
//  * @param propertyLine raw header line
//  * @return single-entry Map of header name to header value
//  */
// private static Map<String,String> parsePropertyLine(String propertyLine){
// Map<String,String> property = new HashMap<String, String>();
// int index = propertyLine.indexOf(propertyLineRegex);
// if(index > 0){
// String propertyName = propertyLine.substring(0, index);
// String properyValue = propertyLine.substring(index+2, propertyLine.length());
// property.put(fixHeaderName(propertyName), properyValue.trim());
// }
// return property;
// }
//
// /**
//  * Normalize an HTTP header name by capitalizing each "-" separated token.
//  * @param headerName raw header name
//  * @return normalized header name, or null when headerName is null
//  */
// public static String fixHeaderName(String headerName) {
// if(headerName==null){
// return null;
// }
// String[] headerNameSplits = headerName.split("-");
// StringBuilder stringBuilder = new StringBuilder();
// for(String headerNameSplit : headerNameSplits) {
// if(Character.isLowerCase(headerNameSplit.codePointAt(0))){
// stringBuilder.append((char)(headerNameSplit.codePointAt(0) - 32));
// stringBuilder.append(TString.removePrefix(headerNameSplit));
// } else {
// stringBuilder.append(headerNameSplit);
// }
// stringBuilder.append("-");
// }
// return TString.removeSuffix(stringBuilder.toString());
// }
/**
 * Parse "key=value" pairs out of a string into a Map.
 * @param str source string containing one or more key=value pairs
 * @return Map of the parsed key/value pairs (quotes around values are removed)
 */
public static Map<String, String> getEqualMap(String str){
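// Illustrative example: for the input name=foo; size="10" this returns {name=foo, size=10};
// surrounding double quotes are stripped from values below.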
Map<String, String> equalMap = new HashMap<String, String>();
String[] searchedStrings = TString.searchByRegex(str, equalMapRegex);
for(String groupString : searchedStrings){
// split
String[] equalStrings = new String[2];
int equalCharIndex= groupString.indexOf(Global.STR_EQUAL);
equalStrings[0] = groupString.substring(0,equalCharIndex);
equalStrings[1] = groupString.substring(equalCharIndex+1,groupString.length());
if(equalStrings.length==2){
String key = equalStrings[0];
String value = equalStrings[1];
if(value.startsWith(Global.STR_QUOTE) && value.endsWith(Global.STR_QUOTE)){
value = value.substring(1,value.length()-1);
}
equalMap.put(key, value);
}
}
return equalMap;
}
/**
 * Extract a named attribute from an HTTP header value.
 * e.g. for "Content-Type: multipart/form-data; boundary=ujjLiiJBznFt70fG1F4EUCkIupn7H4tzm",
 * getPerprotyEqualValue(packetMap,"Content-Type","boundary") returns "ujjLiiJBznFt70fG1F4EUCkIupn7H4tzm".
 * @param packetMap parsed packet map
 * @param propertyName header name to look up
 * @param valueName attribute name inside the header value
 * @return the attribute value, or null if the header is absent
 */
private static String getPerprotyEqualValue(Map<String,Object> packetMap,String propertyName,String valueName){
Object propertyValueObj = packetMap.get(propertyName);
if(propertyValueObj == null){
return null;
}
String propertyValue = propertyValueObj.toString();
Map<String, String> equalMap = getEqualMap(propertyValue);
return equalMap.get(valueName);
}
/**
 * Parse a Cookie / Set-Cookie header into the packet map's cookie list.
 * @param packetMap parsed packet map
 * @param cookieName header name ("Cookie" or "Set-Cookie")
 * @param cookieValue header value
 */
@SuppressWarnings("unchecked")
private static void parseCookie(Map<String, Object> packetMap,String cookieName, String cookieValue){
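// Illustrative example: a "Set-Cookie: sid=abc; Path=/; HttpOnly" header becomes one cookie map
// holding the sid and Path entries (plus HttpOnly / Secure markers when present) appended to the
// packet map's cookie list.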
if(!packetMap.containsKey(HttpStatic.COOKIE_STRING)){
packetMap.put(HttpStatic.COOKIE_STRING, new ArrayList<Map<String, String>>());
}
List<Map<String, String>> cookies = (List<Map<String, String>>) packetMap.get(HttpStatic.COOKIE_STRING);
// Cookie
Map<String, String>cookieMap = getEqualMap(cookieValue);
// Set-Cookie: a response header describes a single cookie
if(HttpStatic.SET_COOKIE_STRING.equalsIgnoreCase(cookieName)){
// record the HttpOnly / Secure flags when present
if(cookieValue.toLowerCase().contains(HttpStatic.HTTPONLY_STRING)){
cookieMap.put(HttpStatic.HTTPONLY_STRING, Global.EMPTY_STRING);
}
if(cookieValue.toLowerCase().contains(HttpStatic.SECURE_STRING)){
cookieMap.put(HttpStatic.SECURE_STRING, Global.EMPTY_STRING);
}
cookies.add(cookieMap);
}
// Cookie: a request header may carry several cookies; store each one separately
else if(HttpStatic.COOKIE_STRING.equalsIgnoreCase(cookieName)){
for(Entry<String,String> cookieMapEntry: cookieMap.entrySet()){
HashMap<String, String> cookieOneMap = new HashMap<String, String>();
cookieOneMap.put(cookieMapEntry.getKey(), cookieMapEntry.getValue());
cookies.add(cookieOneMap);
}
}
}
/**
 * Post-process the body content: if the Content-Encoding header indicates GZIP the body is
 * decompressed, otherwise it is returned unchanged.
 * @param packetMap parsed packet map
 * @param contentBytes raw body bytes
 * @return decoded body bytes (never null)
 * @throws IOException on decompression failure
 */
private static byte[] dealBodyContent(Map<String, Object> packetMap,byte[] contentBytes) throws IOException{
byte[] bytesValue;
if(contentBytes.length == 0 ){
return contentBytes;
}
// GZip
boolean isGZip = packetMap.get(HttpStatic.CONTENT_ENCODING_STRING)==null ? false : packetMap.get(HttpStatic.CONTENT_ENCODING_STRING).toString().contains(HttpStatic.GZIP_STRING);
// GZip
if(isGZip && contentBytes.length>0){
bytesValue = TZip.decodeGZip(contentBytes);
} else {
bytesValue = contentBytes;
}
return TObject.nullDefault(bytesValue,new byte[0]);
}
/**
 * Parse the HTTP start line (request line or status line).
 * @param packetMap target map for the parsed protocol fields
 * @param type parser type: PARSER_TYPE_REQUEST or PARSER_TYPE_RESPONSE
 * @param byteBuffer source ByteBuffer
 * @param contiuneRead callback used to pull more data when the buffer is drained
 * @param timeout read timeout in milliseconds
 * @return hash of the parsed start line, used by the packet cache
 */
public static int parserProtocol(Map<String, Object> packetMap, int type, ByteBuffer byteBuffer, Runnable contiuneRead, int timeout) {
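// Illustrative example: for the request line "GET /index.html?a=1 HTTP/1.1" segment_1 becomes "GET",
// segment_2 collects the path and query string (the '?' itself is dropped but its position is
// remembered) and segment_3 becomes "HTTP/1.1"; the combined hash of the segments is returned so
// identical start lines can later be matched against the packet cache.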
byte[] bytes = THREAD_STRING_BUILDER.get();
int position = 0;
int hashCode = 0;
boolean isCache = type==PARSER_TYPE_REQUEST ? WebContext.isCache() : false;
// Protocol
int segment = 0;
String segment_1 = "";
String segment_2 = "";
String segment_3 = "";
int questPositiion = -1;
byte prevByte = '\0';
byte currentByte = '\0';
long start = System.currentTimeMillis();
while (true) {
while(!byteBuffer.hasRemaining()) {
contiuneRead.run();
if(System.currentTimeMillis() - start > timeout) {
throw new HttpParserException("HttpParser read failed");
}
}
currentByte = byteBuffer.get();
// skip stray CR/LF bytes before the start line
if(segment==0 && (currentByte == Global.BYTE_CR || currentByte == Global.BYTE_LF)){
continue;
}
if (currentByte == Global.BYTE_SPACE && segment < 2) {
if (segment == 0) {
HttpItem httpItem = HttpItem.getHttpItem(bytes, 0, position);
hashCode = hashCode + httpItem.getHashCode() << 1;
segment_1 = httpItem.getString();
} else if (segment == 1) {
HttpItem httpItem = HttpItem.getHttpItem(bytes, 0, position);
hashCode = hashCode + httpItem.getHashCode() << 2;
segment_2 =httpItem.getString();
}
position = 0;
segment++;
continue;
} else if (currentByte == Global.BYTE_QUESTION) {
if (segment == 1) {
questPositiion = byteBuffer.position();
continue;
}
} else if (prevByte == Global.BYTE_CR && currentByte == Global.BYTE_LF && segment == 2) {
HttpItem httpItem = HttpItem.getHttpItem(bytes, 0, position);
hashCode = hashCode + httpItem.getHashCode() << 3;
segment_3 =httpItem.getString();
position = 0;
break;
}
prevByte = currentByte;
if (currentByte == Global.BYTE_CR) {
continue;
}
bytes[position] = currentByte;
position++;
}
if (type == 0) {
packetMap.put(PL_METHOD, segment_1);
questPositiion = questPositiion - segment_1.length() - 1;
packetMap.put(PL_PATH, questPositiion > 0 ? segment_2.substring(0, questPositiion - 1) : segment_2);
if (questPositiion > 0) {
packetMap.put(PL_QUERY_STRING, segment_2.substring(questPositiion - 1));
}
if(segment_3.charAt(0)=='H' && segment_3.charAt(1)=='T' && segment_3.charAt(2)=='T' && segment_3.charAt(3)=='P') {
packetMap.put(PL_PROTOCOL, HttpStatic.HTTP.getString());
} else {
throw new HttpParserException("Not a http packet");
}
switch (segment_3.charAt(7)) {
case '1':
packetMap.put(PL_VERSION, HttpStatic.HTTP_11_STRING);
break;
case '0':
packetMap.put(PL_VERSION, HttpStatic.HTTP_10_STRING);
break;
case '9':
packetMap.put(PL_VERSION, HttpStatic.HTTP_09_STRING);
break;
default:
packetMap.put(PL_VERSION, HttpStatic.HTTP_11_STRING);
}
}
if (type == 1) {
if(segment_1.charAt(0)=='H' && segment_1.charAt(1)=='T' && segment_1.charAt(2)=='T' && segment_1.charAt(3)=='P') {
packetMap.put(PL_PROTOCOL, HttpStatic.HTTP.getString());
} else {
throw new HttpParserException("Not a http packet");
}
switch (segment_1.charAt(7)) {
case '1':
packetMap.put(PL_VERSION, HttpStatic.HTTP_11_STRING);
break;
case '0':
packetMap.put(PL_VERSION, HttpStatic.HTTP_10_STRING);
break;
case '9':
packetMap.put(PL_VERSION, HttpStatic.HTTP_09_STRING);
break;
default:
packetMap.put(PL_VERSION, HttpStatic.HTTP_11_STRING);
}
packetMap.put(PL_STATUS, segment_2);
packetMap.put(PL_STATUS_CODE, segment_3);
}
return hashCode;
}
/**
 * Parse one HTTP header line into the packet map.
 * @param packetMap target map for the parsed header
 * @param byteBuffer source ByteBuffer
 * @param contiuneRead callback used to pull more data when the buffer is drained
 * @param timeout read timeout in milliseconds
 * @return true when the blank line ending the header block has been reached, false when a header line was parsed
 */
public static boolean parseHeader(Map<String, Object> packetMap, ByteBuffer byteBuffer, Runnable contiuneRead, int timeout) {
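// Illustrative example: the line "Host: localhost" is split at the first ": " into headerName "Host"
// and headerValue "localhost"; an empty line (bare CRLF) makes this method return true instead.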
byte[] bytes = THREAD_STRING_BUILDER.get();
int position = 0;
boolean isCache = WebContext.isCache();
// Protocol
boolean onHeaderName = true;
byte prevByte = '\0';
byte currentByte = '\0';
String headerName = null;
String headerValue = null;
long start = System.currentTimeMillis();
while (true) {
while(!byteBuffer.hasRemaining()) {
contiuneRead.run();
if(System.currentTimeMillis() - start > timeout) {
throw new HttpParserException("HttpParser read failed");
}
}
currentByte = byteBuffer.get();
if (onHeaderName && prevByte == Global.BYTE_COLON && currentByte == Global.BYTE_SPACE) {
if(isCache) {
headerName = HttpItem.getHttpItem(bytes, 0, position).getString();
} else {
headerName = new String(bytes, 0, position);
}
onHeaderName = false;
position = 0;
continue;
} else if (!onHeaderName && prevByte == Global.BYTE_CR && currentByte == Global.BYTE_LF) {
if(isCache) {
headerValue = HttpItem.getHttpItem(bytes, 0, position).getString();
} else {
headerValue = new String(bytes, 0, position);
}
break;
}
// a bare CRLF marks the end of the header block
if (onHeaderName && prevByte == Global.BYTE_CR && currentByte == Global.BYTE_LF) {
return true;
}
prevByte = currentByte;
if (onHeaderName && currentByte == Global.BYTE_COLON) {
continue;
} else if (!onHeaderName && currentByte == Global.BYTE_CR) {
continue;
}
bytes[position] = currentByte;
position++;
}
if(headerName!=null && headerValue!=null) {
packetMap.put(headerName, headerValue);
}
return false;
// packetMap.put(fixHeaderName(headerName), headerValue);
}
/**
 * Parse a complete HTTP packet into a Map. The resulting map contains:
 * 1. protocol fields as key/value entries
 * 2. headers as key/value entries
 * 3. cookies as a List[Map[String,String]]
 * 4. multipart parts as a List[Map[String,Object]] (each part is itself a small HTTP-style map)
 * 5. the body, stored under the BODY_VALUE key
 * @param session socket session the packet is read from
 * @param packetMap target map for the parsed packet
 * @param type parser type, 0: Request, 1: Response
 * @param byteBufferChannel source buffer channel
 * @param timeout read timeout in milliseconds
 * @param requestMaxSize maximum allowed request size, unit: kb
 * @return the parsed packet Map
 * @throws IOException on IO error
 */
public static Map<String, Object> parser(IoSession session, Map<String, Object> packetMap, int type,
ByteBufferChannel byteBufferChannel, int timeout,
long requestMaxSize) throws IOException {
int totalLength = 0;
long protocolMark = 0;
int headerMark = 0;
int protocolPosition = 0;
boolean hasBody = false;
boolean isCache = WebContext.isCache();
requestMaxSize = requestMaxSize < 0 ? Integer.MAX_VALUE : requestMaxSize;
// callback that pulls more data from the socket when the buffer runs dry
Runnable contiuneRead = ()->{
if(session==null || !session.isConnected()) {
throw new HttpParserException("Socket is disconnect");
}
session.getSocketSelector().eventChoose();
if(session.getReadByteBufferChannel().isReleased()) {
throw new HttpParserException("socket read buffer is released, may be Socket is disconnected");
}
};
// parse the HTTP packet from the buffer channel
while(byteBufferChannel.size() > 0) {
boolean findCache = false;
ByteBuffer innerByteBuffer = byteBufferChannel.getByteBuffer();
try {
{
protocolMark = parserProtocol(packetMap, type, innerByteBuffer, contiuneRead, timeout);
protocolPosition = innerByteBuffer.position() - 1;
if (isCache) {
for (Entry<Long, Map<String, Object>> packetMapCacheItem : PACKET_MAP_CACHE.entrySet()) {
long cachedMark = ((Long) packetMapCacheItem.getKey()).longValue();
long totalLengthInMark = (cachedMark << 32) >> 32;
if (totalLengthInMark > innerByteBuffer.limit()) {
continue;
}
if (byteBufferChannel.size() >= totalLengthInMark &&
byteBufferChannel.get((int) totalLengthInMark - 1) == 10 &&
byteBufferChannel.get((int) totalLengthInMark - 2) == 13) {
headerMark = THash.HashFNV1(innerByteBuffer, protocolPosition, (int) (totalLengthInMark - protocolPosition));
if (protocolMark + headerMark == cachedMark >> 32) {
innerByteBuffer.position((int) totalLengthInMark);
findCache = true;
packetMap = packetMapCacheItem.getValue();
break;
}
}
}
}
}
if(!findCache) {
{
while (!parseHeader(packetMap, innerByteBuffer, contiuneRead, timeout)) {
if (!innerByteBuffer.hasRemaining() && session.isConnected()) {
return null;
}
}
}
// Cookie
{
String cookieName = null;
String cookieValue = null;
if (type == PARSER_TYPE_REQUEST && packetMap.containsKey(HttpStatic.COOKIE_STRING)) {
cookieName = HttpStatic.COOKIE_STRING;
cookieValue = packetMap.get(HttpStatic.COOKIE_STRING).toString();
packetMap.remove(HttpStatic.COOKIE_STRING);
} else if (type == PARSER_TYPE_RESPONSE && packetMap.containsKey(HttpStatic.SET_COOKIE_STRING)) {
cookieName = HttpStatic.SET_COOKIE_STRING;
cookieValue = packetMap.get(HttpStatic.SET_COOKIE_STRING).toString();
packetMap.remove(HttpStatic.SET_COOKIE_STRING);
}
if (cookieName != null) {
parseCookie(packetMap, cookieName, cookieValue);
}
}
if (isCache) {
totalLength = innerByteBuffer.position();
headerMark = THash.HashFNV1(innerByteBuffer, protocolPosition, (int) (totalLength - protocolPosition));
long mark = (protocolMark + headerMark) << 32 | totalLength; // hash,
packetMap.put(HEADER_MARK, mark);
HashMap<String, Object> cachedPacketMap = new HashMap<String, Object>();
cachedPacketMap.putAll(packetMap);
cachedPacketMap.put(CACHE_FLAG, 1);
PACKET_MAP_CACHE.put(mark, cachedPacketMap);
}
}
} finally {
byteBufferChannel.compact();
}
if("GET".equals(packetMap.get(PL_METHOD)) || packetMap.containsKey(HttpStatic.CONTENT_TYPE_STRING)) {
hasBody = true;
} else {
// no body expected; stop parsing
break;
}
// parse the HTTP body, if present
if(hasBody){
String contentType =packetMap.get(HttpStatic.CONTENT_TYPE_STRING)==null ? Global.EMPTY_STRING : packetMap.get(HttpStatic.CONTENT_TYPE_STRING).toString();
String transferEncoding = packetMap.get(HttpStatic.TRANSFER_ENCODING_STRING)==null ? "" : packetMap.get(HttpStatic.TRANSFER_ENCODING_STRING).toString();
//1. multipart/form-data: split the body into parts
if(contentType.contains(MULTIPART_FORM_DATA)){
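// Each loop iteration below consumes one boundary-delimited part: the part headers are parsed first,
// then the part body is either kept in memory (plain form fields) or streamed into a temporary file
// under UPLOAD_PATH (file uploads).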
// Part list
List<Map<String, Object>> bodyPartList = new ArrayList<Map<String, Object>>();
//boundary part
String boundary = TString.assembly("--", getPerprotyEqualValue(packetMap, HttpStatic.CONTENT_TYPE_STRING, HttpStatic.BOUNDARY_STRING));
ByteBuffer boundaryEnd = ByteBuffer.allocate(2);
while(true) {
if (!byteBufferChannel.waitData(boundary.getBytes(), timeout, contiuneRead)) {
throw new HttpParserException("Http Parser readFromChannel data error");
}
int boundaryIndex = byteBufferChannel.indexOf(boundary.getBytes(Global.CS_UTF_8));
// boundary
byteBufferChannel.shrink((boundaryIndex + boundary.length()));
// boundary
boundaryEnd.clear();
int readSize = byteBufferChannel.readHead(boundaryEnd);
totalLength = totalLength + readSize;
if(totalLength > requestMaxSize * 1024){
throw new RequestTooLarge("Request is too large: {max size: " + requestMaxSize*1024 + ", expect size: " + totalLength + "}");
}
// a trailing "--" after the boundary marks the end of the multipart body
if (Arrays.equals(boundaryEnd.array(), "--".getBytes())) {
byteBufferChannel.shrink(2);
break;
}
byte[] boundaryMark = HttpStatic.BODY_MARK.getBytes();
if (!byteBufferChannel.waitData(boundaryMark, timeout, contiuneRead)) {
throw new HttpParserException("Http Parser readFromChannel data error");
}
int partHeadEndIndex = byteBufferChannel.indexOf(boundaryMark);
//Part
ByteBuffer partHeadBuffer = TByteBuffer.allocateDirect(partHeadEndIndex + 4);
byteBufferChannel.readHead(partHeadBuffer);
// Bytebuffer
ByteBufferChannel partByteBufferChannel = new ByteBufferChannel(partHeadEndIndex + 4);
partByteBufferChannel.writeEnd(partHeadBuffer);
Map<String, Object> partMap = new HashMap<String, Object>();
ByteBuffer partByteBuffer = partByteBufferChannel.getByteBuffer();
try {
while (parseHeader(partMap, partByteBuffer, contiuneRead, timeout)) {
if (!partByteBuffer.hasRemaining() && session.isConnected()) {
return null;
}
}
} finally {
partByteBufferChannel.compact();
}
TByteBuffer.release(partHeadBuffer);
partByteBufferChannel.release();
String fileName = getPerprotyEqualValue(partMap, HttpStatic.CONTENT_DISPOSITION_STRING, "filename");
if(fileName!=null && fileName.isEmpty()){
break;
}
// Part
// index
boundaryIndex = -1;
if (fileName == null) {
if (!byteBufferChannel.waitData(boundary.getBytes(), timeout, contiuneRead)) {
throw new HttpParserException("Http Parser readFromChannel data error");
}
boundaryIndex = byteBufferChannel.indexOf(boundary.getBytes(Global.CS_UTF_8));
ByteBuffer bodyByteBuffer = ByteBuffer.allocate(boundaryIndex - 2);
byteBufferChannel.readHead(bodyByteBuffer);
partMap.put(BODY_VALUE, bodyByteBuffer.array());
}
else {
String fileExtName = TFile.getFileExtension(fileName);
fileExtName = fileExtName==null || fileExtName.equals(Global.EMPTY_STRING) ? "tmp" : fileExtName;
String localFileName =TString.assembly(UPLOAD_PATH, Global.NAME, System.currentTimeMillis(), ".", fileExtName);
boolean isFileRecvDone = false;
while (true){
int dataLength = byteBufferChannel.size();
if (byteBufferChannel.waitData(boundary.getBytes(), 0, contiuneRead)) {
isFileRecvDone = true;
}
if(!isFileRecvDone) {
if(dataLength!=0) {
byteBufferChannel.saveToFile(localFileName, dataLength);
totalLength = totalLength + dataLength;
}
continue;
} else {
boundaryIndex = byteBufferChannel.indexOf(boundary.getBytes(Global.CS_UTF_8));
int length = boundaryIndex == -1 ? byteBufferChannel.size() : (boundaryIndex - 2);
if (boundaryIndex > 0) {
byteBufferChannel.saveToFile(localFileName, length);
totalLength = totalLength + dataLength;
}
}
if(totalLength > requestMaxSize * 1024){
TFile.deleteFile(new File(localFileName));
throw new RequestTooLarge("Request is too large: {max size: " + requestMaxSize*1024 + ", expect size: " + totalLength + "}");
}
if(!isFileRecvDone){
TEnv.sleep(100);
} else {
break;
}
}
if(boundaryIndex == -1){
new File(localFileName).delete();
throw new HttpParserException("Http Parser not enough data with " + boundary);
}else{
partMap.remove(BODY_VALUE);
partMap.put(BODY_FILE, localFileName.getBytes());
}
}
//bodyPartList
bodyPartList.add(partMap);
}
// part list packetMap
packetMap.put(BODY_PARTS, bodyPartList);
}
//2. Transfer-Encoding: chunked body
else if(HttpStatic.CHUNKED_STRING.equals(transferEncoding)){
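// A chunked body is a sequence of "<hex length>\r\n<data>\r\n" blocks terminated by a "0" length
// line; each block is appended to chunkedByteBufferChannel and the whole body is decoded afterwards.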
ByteBufferChannel chunkedByteBufferChannel = new ByteBufferChannel(3);
String chunkedLengthLine = "";
while(chunkedLengthLine!=null){
if(!byteBufferChannel.waitData("\r\n".getBytes(), timeout, contiuneRead)){
throw new HttpParserException("Http Parser readFromChannel data error");
}
chunkedLengthLine = byteBufferChannel.readLine().trim();
if("0".equals(chunkedLengthLine)){
break;
}
if(chunkedLengthLine.isEmpty()){
continue;
}
int chunkedLength = 0;
//chunked
try {
chunkedLength = Integer.parseInt(chunkedLengthLine, 16);
}catch(Exception e){
e.printStackTrace();
break;
}
if(!byteBufferChannel.waitData(chunkedLength, timeout, contiuneRead)){
throw new HttpParserException("Http Parser readFromChannel data error");
}
int readSize = 0;
if(chunkedLength > 0) {
//chunked
ByteBuffer byteBuffer = TByteBuffer.allocateDirect(chunkedLength);
readSize = byteBufferChannel.readHead(byteBuffer);
totalLength = totalLength + readSize;
if(readSize != chunkedLength){
throw new HttpParserException("Http Parser readFromChannel chunked data error");
}
chunkedByteBufferChannel.writeEnd(byteBuffer);
TByteBuffer.release(byteBuffer);
}
if(totalLength > requestMaxSize * 1024){
throw new RequestTooLarge("Request is too large: {max size: " + requestMaxSize*1024 + ", expect size: " + totalLength + "}");
}
byteBufferChannel.shrink(2);
}
byte[] value = dealBodyContent(packetMap, chunkedByteBufferChannel.array());
chunkedByteBufferChannel.release();
packetMap.put(BODY_VALUE, value);
byteBufferChannel.shrink(2);
}
//3. Content-Length given: read the body as a fixed-length block
else if(packetMap.containsKey(HttpStatic.CONTENT_LENGTH_STRING)){
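// Fixed-length body: wait until Content-Length bytes are buffered, then read them in a single block.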
int contentLength = Integer.parseInt(packetMap.get(HttpStatic.CONTENT_LENGTH_STRING).toString());
totalLength = totalLength + contentLength;
if(totalLength > requestMaxSize * 1024){
throw new HttpParserException("Request is too large: {max size: " + requestMaxSize*1024 + ", expect size: " + totalLength + "}");
}
if(!byteBufferChannel.waitData(contentLength, timeout, contiuneRead)){
throw new HttpParserException("Http Parser readFromChannel data error");
}
ByteBuffer byteBuffer = ByteBuffer.allocate(contentLength);
byteBufferChannel.readHead(byteBuffer);
byte[] contentBytes = byteBuffer.array();
byte[] value = dealBodyContent(packetMap, contentBytes);
packetMap.put(BODY_VALUE, value);
}
break;
}
}
return packetMap;
}
/**
 * Parse an HTTP request from the channel and build a Request object.
 * @param session socket session
 * @param byteBufferChannel source buffer channel
 * @param timeOut read timeout in milliseconds
 * @param requestMaxSize maximum allowed request size, unit: kb
 * @return the parsed Request, or null if a complete request is not yet available
 * @throws IOException on IO error
 */
@SuppressWarnings("unchecked")
public static Request parseRequest(IoSession session, ByteBufferChannel byteBufferChannel, int timeOut, long requestMaxSize) throws IOException {
boolean isCache = WebContext.isCache();
Request request = null;
Map<String, Object> packetMap = THREAD_PACKET_MAP.get();
packetMap = parser(session, packetMap, PARSER_TYPE_REQUEST, byteBufferChannel, timeOut, requestMaxSize);
// if the map is empty or the channel was released, a complete request is not available yet
if(packetMap==null || packetMap.isEmpty() || byteBufferChannel.isReleased()){
return null;
}
request = THREAD_REQUEST.get();
request.clear();
boolean cacheFlag = false;
boolean bodyFlag = false;
boolean bodyPartFlag = false;
Set<Entry<String, Object>> parsedItems= packetMap.entrySet();
for(Entry<String, Object> parsedPacketEntry: parsedItems) {
String key = parsedPacketEntry.getKey();
switch (key) {
case CACHE_FLAG:
cacheFlag = true;
break;
case PL_METHOD:
request.protocol().setMethod(parsedPacketEntry.getValue().toString());
break;
case PL_PROTOCOL:
request.protocol().setProtocol(parsedPacketEntry.getValue().toString());
break;
case PL_QUERY_STRING:
request.protocol().setQueryString(parsedPacketEntry.getValue().toString());
break;
case PL_VERSION:
request.protocol().setVersion(parsedPacketEntry.getValue().toString());
break;
case PL_PATH:
request.protocol().setPath(parsedPacketEntry.getValue().toString());
break;
case HEADER_MARK:
request.setMark((Long)parsedPacketEntry.getValue());
break;
case HttpStatic.COOKIE_STRING:
List<Map<String, String>> cookieMap = (List<Map<String, String>>)packetMap.get(HttpStatic.COOKIE_STRING);
// build Cookie objects from the parsed cookie maps
for(Map<String,String> cookieMapItem : cookieMap){
Cookie cookie = Cookie.buildCookie(cookieMapItem);
request.cookies().add(cookie);
}
cookieMap.clear();
break;
case BODY_VALUE:
bodyFlag = true;
byte[] value = (byte[])(parsedPacketEntry.getValue());
request.body().write(value);
break;
case BODY_PARTS:
bodyFlag = true;
bodyPartFlag = true;
List<Map<String, Object>> parsedParts = (List<Map<String, Object>>)(parsedPacketEntry.getValue());
// convert each parsed part map into a Part object
for(Map<String, Object> parsedPartMap : parsedParts){
Part part = new Part();
// copy the parsed entries into the Part
for(Entry<String, Object> parsedPartMapItem : parsedPartMap.entrySet()){
// BODY_VALUE entries become the in-memory part body
if(parsedPartMapItem.getKey().equals(BODY_VALUE)){
part.body().changeToBytes((byte[])parsedPartMapItem.getValue());
} else if(parsedPartMapItem.getKey().equals(BODY_FILE)){
String filePath = new String((byte[])parsedPartMapItem.getValue());
part.body().changeToFile(new File(filePath));
} else {
// everything else is treated as a part header
String partedHeaderKey = parsedPartMapItem.getKey();
String partedHeaderValue = parsedPartMapItem.getValue().toString();
part.header().put(partedHeaderKey, partedHeaderValue);
if(HttpStatic.CONTENT_DISPOSITION_STRING.equals(partedHeaderKey)){
//Content-Disposition carries "name=xxx" style attributes; merge them into the part header
Map<String, String> contentDispositionValue = HttpParser.getEqualMap(partedHeaderValue);
part.header().putAll(contentDispositionValue);
}
}
}
request.parts().add(part);
parsedPartMap.clear();
}
break;
default:
request.header().put(parsedPacketEntry.getKey(), parsedPacketEntry.getValue().toString());
break;
}
}
if(!cacheFlag) {
packetMap.clear();
}
if(isCache && bodyFlag) {
// multipart/form-data requests are not cached
if(bodyPartFlag) {
request.setMark(null);
packetMap.clear();
} else if (request.getMark() != null && bodyFlag) {
Integer bodyMark = request.body().getMark();
request.setMark(request.getMark() | bodyMark);
}
}
return request;
}
/**
 * Parse an HTTP response from the channel and build a Response object.
 * @param session socket session
 * @param byteBufferChannel source buffer channel
 * @param timeOut read timeout in milliseconds
 * @return the parsed Response, or null if a complete response is not yet available
 * @throws IOException on IO error
 */
@SuppressWarnings("unchecked")
public static Response parseResponse(IoSession session, ByteBufferChannel byteBufferChannel, int timeOut) throws IOException {
Map<String, Object> packetMap = THREAD_PACKET_MAP.get();
packetMap = parser(session, packetMap, PARSER_TYPE_RESPONSE, byteBufferChannel, timeOut, -1);
// if the map is empty or the channel was released, a complete response is not available yet
if(packetMap==null || packetMap.isEmpty() || byteBufferChannel.isReleased()){
return null;
}
packetMap.remove(HEADER_MARK);
Response response = THREAD_RESPONSE.get();
response.clear();
Set<Entry<String, Object>> parsedItems= packetMap.entrySet();
for(Entry<String, Object> parsedPacketEntry: parsedItems){
String key = parsedPacketEntry.getKey();
switch (key) {
case PL_PROTOCOL:
response.protocol().setProtocol(parsedPacketEntry.getValue().toString());
break;
case PL_VERSION:
response.protocol().setVersion(parsedPacketEntry.getValue().toString());
break;
case PL_STATUS:
response.protocol().setStatus(Integer.parseInt(parsedPacketEntry.getValue().toString()));
break;
case PL_STATUS_CODE:
response.protocol().setStatusCode(parsedPacketEntry.getValue().toString());
break;
case HttpStatic.COOKIE_STRING:
List<Map<String, String>> cookieMap = (List<Map<String, String>>)parsedPacketEntry.getValue();
// build Cookie objects from the parsed cookie maps
for(Map<String,String> cookieMapItem : cookieMap){
Cookie cookie = Cookie.buildCookie(cookieMapItem);
response.cookies().add(cookie);
}
break;
case BODY_VALUE:
response.body().write((byte[])parsedPacketEntry.getValue());
break;
default:
response.header().put(parsedPacketEntry.getKey(), parsedPacketEntry.getValue().toString());
break;
}
}
packetMap.clear();
return response;
}
public static void resetThreadLocal(){
THREAD_REQUEST.set(new Request());
THREAD_RESPONSE.set(new Response());
}
}
|
package hex.deeplearning;
import javax.imageio.ImageIO;
import hex.ModelMetricsMultinomial;
import org.junit.BeforeClass;
import org.junit.Test;
import water.TestUtil;
import water.fvec.*;
import water.fvec.Frame;
import water.gpu.ImageIter;
import water.gpu.ImagePred;
import water.gpu.ImageTrain;
import water.gpu.util;
import water.util.RandomUtils;
import java.awt.*;
import java.awt.image.BufferedImage;
import java.io.*;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Random;
public class DeepWaterTest extends TestUtil {
@BeforeClass
public static void stall() { stall_till_cloudsize(1); }
@Test
public void inceptionPrediction() throws IOException {
// load the cuda lib in CUDA_PATH, optional. theoretically we can find them if they are in LD_LIBRARY_PATH
// util.loadCudaLib();
util.loadNativeLib("mxnet");
util.loadNativeLib("Native");
BufferedImage img = ImageIO.read(new File("/users/arno/deepwater/test/test2.jpg"));
int w = 224, h = 224;
BufferedImage scaledImg = new BufferedImage(w, h, img.getType());
Graphics2D g2d = scaledImg.createGraphics();
g2d.drawImage(img, 0, 0, w, h, null);
g2d.dispose();
float[] pixels = new float[w * h * 3];
int r_idx = 0;
int g_idx = r_idx + w * h;
int b_idx = g_idx + w * h;
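// Pack the image in planar (channel-major) order: all red values first, then all green, then all
// blue, in row-major scan order, before handing the array to the native predictor.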
for (int i = 0; i < h; i++) {
for (int j = 0; j < w; j++) {
Color mycolor = new Color(scaledImg.getRGB(j, i));
int red = mycolor.getRed();
int green = mycolor.getGreen();
int blue = mycolor.getBlue();
pixels[r_idx] = red; r_idx++;
pixels[g_idx] = green; g_idx++;
pixels[b_idx] = blue; b_idx++;
}
}
ImagePred m = new ImagePred();
// the path to Inception model
m.setModelPath("/users/arno/deepwater/Inception");
m.loadInception();
System.out.println("\n\n" + m.predict(pixels)+"\n\n");
}
@Test
public void inceptionFineTuning() throws IOException {
// util.loadCudaLib();
util.loadNativeLib("mxnet");
util.loadNativeLib("Native");
String path = "/users/arno/kaggle/statefarm/input/";
BufferedReader br = new BufferedReader(new FileReader(new File(path+"driver_imgs_list.csv")));
ArrayList<Float> train_labels = new ArrayList<>();
ArrayList<String> train_data = new ArrayList<>();
String line;
br.readLine(); //skip header
while ((line = br.readLine()) != null) {
String[] tmp = line.split(",");
train_labels.add(new Float(tmp[1].substring(1)).floatValue());
train_data.add(path+"train/"+tmp[1]+"/"+tmp[2]);
}
br.close();
int batch_size = 32;
int classes = 10;
ImageTrain m = new ImageTrain();
m.buildNet(classes, batch_size, "inception_bn");
int max_iter = 1; //epochs
for (int iter = 0; iter < max_iter; iter++) {
//each iteration does a different random shuffle
Random rng = RandomUtils.getRNG(0);
rng.setSeed(0xDECAF+0xD00D*iter);
Collections.shuffle(train_labels,rng);
rng.setSeed(0xDECAF+0xD00D*iter);
Collections.shuffle(train_data,rng);
ImageIter img_iter = new ImageIter(train_data, train_labels, batch_size, 224, 224);
while(img_iter.Next()){
float[] data = img_iter.getData();
float[] labels = img_iter.getLabel();
float[] pred = m.train(data, labels);
Vec[] classprobs = new Vec[classes];
String[] names = new String[classes];
for (int i=0;i<classes;++i) {
names[i] = "c" + i;
double[] vals=new double[batch_size];
for (int j = 0; j < batch_size; ++j) {
int idx=i*batch_size+j; //[p0,...,p9,p0,...,p9, ... ,p0,...,p9]
vals[j] = pred[idx];
}
classprobs[i] = Vec.makeVec(vals,Vec.newKey());
}
water.fvec.Frame preds = new Frame(names,classprobs);
long[] lab = new long[batch_size];
for (int i=0;i<batch_size;++i)
lab[i] = (long)labels[i];
Vec actual = Vec.makeVec(lab,names,Vec.newKey());
ModelMetricsMultinomial mm = ModelMetricsMultinomial.make(preds,actual);
System.out.println(mm.toString());
}
}
scoreTestSet(path,classes,m);
}
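// Writes a Kaggle-style submission file: one row per test image with the file name followed by the ten class probabilities (header img,c0..c9).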
public static void scoreTestSet(String path, int classes, ImageTrain m) throws IOException {
// make test set predictions
BufferedReader br = new BufferedReader(new FileReader(new File(path+"test_list.csv"))); //file created with 'cut -d, -f1 sample_submission.csv > test_list.csv'
ArrayList<Float> test_labels = new ArrayList<>();
ArrayList<String> test_data = new ArrayList<>();
String line;
while ((line = br.readLine()) != null) {
test_labels.add(new Float(-999)); //dummy
test_data.add(path+"test/"+line);
}
br.close();
FileWriter fw = new FileWriter(path+"/submission.csv");
int batch_size = 1; //avoid issues with batching at the end of the test set
ImageIter img_iter = new ImageIter(test_data, test_labels, batch_size, 224, 224);
fw.write("img,c0,c1,c2,c3,c4,c5,c6,c7,c8,c9\n");
while(img_iter.Next()) {
float[] data = img_iter.getData();
float[] labels = img_iter.getLabel();
String[] files = img_iter.getFiles();
float[] pred = m.predict(data, labels);
for (int i=0;i<batch_size;++i) {
String file = files[i];
String[] pcs = file.split("/");
fw.write(pcs[pcs.length-1]);
for (int j=0;j<classes;++j) {
int idx=i*classes+j;
fw.write(","+pred[idx]);
}
fw.write("\n");
}
}
fw.close();
}
}
|
package com.structurizr.example.spring.petclinic;
import com.structurizr.Workspace;
import com.structurizr.api.StructurizrClient;
import com.structurizr.componentfinder.ComponentFinder;
import com.structurizr.componentfinder.JavadocComponentFinderStrategy;
import com.structurizr.componentfinder.SpringComponentFinderStrategy;
import com.structurizr.io.json.JsonWriter;
import com.structurizr.model.*;
import com.structurizr.view.*;
import java.io.File;
import java.io.StringWriter;
public class SpringPetClinic {
public static void main(String[] args) throws Exception {
Workspace workspace = new Workspace("Spring PetClinic",
"This is a C4 representation of the Spring PetClinic sample app (https://github.com/spring-projects/spring-petclinic/)");
Model model = workspace.getModel();
// create the basic model (the stuff we can't get from the code)
SoftwareSystem springPetClinic = model.addSoftwareSystem("Spring PetClinic", "Allows employees to view and manage information regarding the veterinarians, the clients, and their pets.");
Person clinicEmployee = model.addPerson("Clinic Employee", "An employee of the clinic");
clinicEmployee.uses(springPetClinic, "Uses");
Container webApplication = springPetClinic.addContainer(
"Web Application", "Allows employees to view and manage information regarding the veterinarians, the clients, and their pets.", "Apache Tomcat 7.x");
Container relationalDatabase = springPetClinic.addContainer(
"Relational Database", "Stores information regarding the veterinarians, the clients, and their pets.", "HSQLDB");
clinicEmployee.uses(webApplication, "Uses", "HTTP");
webApplication.uses(relationalDatabase, "Reads from and writes to", "JDBC, port 9001");
// and now automatically find all Spring @Controller, @Component, @Service and @Repository components
ComponentFinder componentFinder = new ComponentFinder(
webApplication, "org.springframework.samples.petclinic",
new SpringComponentFinderStrategy(),
new JavadocComponentFinderStrategy(new File("/Users/simon/Documents/sandbox/spring/spring-petclinic/src/main/java/"), 150));
componentFinder.findComponents();
// connect the user to all of the Spring MVC controllers
webApplication.getComponents().stream()
.filter(c -> c.getTechnology().equals("Spring MVC Controller"))
.forEach(c -> clinicEmployee.uses(c, "Uses"));
// connect all of the repository components to the relational database
webApplication.getComponents().stream()
.filter(c -> c.getTechnology().equals("Spring Repository"))
.forEach(c -> c.uses(relationalDatabase, "Reads from and writes to"));
// finally create some views
ViewSet viewSet = workspace.getViews();
SystemContextView contextView = viewSet.createContextView(springPetClinic);
contextView.setKey("context");
contextView.addAllSoftwareSystems();
contextView.addAllPeople();
ContainerView containerView = viewSet.createContainerView(springPetClinic);
containerView.setKey("containers");
containerView.addAllPeople();
containerView.addAllSoftwareSystems();
containerView.addAllContainers();
ComponentView componentView = viewSet.createComponentView(webApplication);
componentView.setKey("components");
componentView.addAllComponents();
componentView.addAllPeople();
componentView.add(relationalDatabase);
// link the architecture model with the code
for (Component component : webApplication.getComponents()) {
if (component.getSourcePath() != null) {
component.setSourcePath(component.getSourcePath().replace(
"/Users/simon/Documents/sandbox/spring/spring-petclinic/",
"https://github.com/spring-projects/spring-petclinic/tree/master/"));
}
}
// tag and style some elements
springPetClinic.addTags("Spring PetClinic");
webApplication.getComponents().stream().filter(c -> c.getTechnology().equals(SpringComponentFinderStrategy.SPRING_MVC_CONTROLLER)).forEach(c -> c.addTags("Spring MVC Controller"));
webApplication.getComponents().stream().filter(c -> c.getTechnology().equals(SpringComponentFinderStrategy.SPRING_SERVICE)).forEach(c -> c.addTags("Spring Service"));
webApplication.getComponents().stream().filter(c -> c.getTechnology().equals(SpringComponentFinderStrategy.SPRING_REPOSITORY)).forEach(c -> c.addTags("Spring Repository"));
relationalDatabase.addTags("Database");
viewSet.getConfiguration().getStyles().add(new ElementStyle("Spring PetClinic", null, null, "#6CB33E", "white", null));
viewSet.getConfiguration().getStyles().add(new ElementStyle(Tags.PERSON, null, null, "#519823", "white", null, Shape.Person));
viewSet.getConfiguration().getStyles().add(new ElementStyle(Tags.CONTAINER, null, null, "#91D366", "white", null));
viewSet.getConfiguration().getStyles().add(new ElementStyle("Database", null, null, null, null, null, Shape.Cylinder));
viewSet.getConfiguration().getStyles().add(new ElementStyle("Spring MVC Controller", null, null, "#D4F3C0", "black", null));
viewSet.getConfiguration().getStyles().add(new ElementStyle("Spring Service", null, null, "#6CB33E", "black", null));
viewSet.getConfiguration().getStyles().add(new ElementStyle("Spring Repository", null, null, "#95D46C", "black", null));
StructurizrClient structurizrClient = new StructurizrClient("https://api.structurizr.com", "key", "secret");
structurizrClient.mergeWorkspace(1, workspace);
}
}
|
package hex.tree.drf;
import hex.tree.CompressedTree;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import water.DKV;
import water.Key;
import water.TestUtil;
import water.exceptions.H2OIllegalArgumentException;
import water.fvec.Frame;
import water.fvec.Vec;
import static water.serial.ModelSerializationTest.assertTreeEquals;
import static water.serial.ModelSerializationTest.getTrees;
public class DRFCheckpointTest extends TestUtil {
@BeforeClass public static void stall() { stall_till_cloudsize(1); }
/** Tests whether the reconstructed initial frame matches the last iteration
* of the DRF model builder.
*
* <p>This test verifies a multinomial model.</p>
*/
@Test
public void testCheckpointReconstruction4Multinomial() {
testCheckPointReconstruction("smalldata/iris/iris.csv", 4, true, 5, 3);
}
/** Tests whether the reconstructed initial frame matches the last iteration
* of the DRF model builder.
*
* <p>This test verifies a binomial model.</p>
*/
@Test
public void testCheckpointReconstruction4Binomial() {
testCheckPointReconstruction("smalldata/logreg/prostate.csv", 1, true, 5, 3);
}
/** Tests that the right exception is thrown if a non-modifiable parameter is specified.
*/
@Test(expected = H2OIllegalArgumentException.class)
public void testCheckpointWrongParams() {
testCheckPointReconstruction("smalldata/iris/iris.csv", 4, true, 5, 3, 0.2f, 0.67f);
}
/** Tests whether the reconstructed initial frame matches the last iteration
* of the DRF model builder.
*
* <p>This test verifies a regression model.</p>
*/
@Test
public void testCheckpointReconstruction4Regression() {
testCheckPointReconstruction("smalldata/logreg/prostate.csv", 8, false, 5, 3);
}
private void testCheckPointReconstruction(String dataset,
int responseIdx,
boolean classification,
int ntreesInPriorModel, int ntreesInNewModel) {
testCheckPointReconstruction(dataset, responseIdx, classification, ntreesInPriorModel, ntreesInNewModel, 0.632f, 0.632f);
}
private void testCheckPointReconstruction(String dataset,
int responseIdx,
boolean classification,
int ntreesInPriorModel, int ntreesInNewModel,
float sampleRateInPriorModel, float sampleRateInNewModel) {
Frame f = parse_test_file(dataset);
// If classification, turn the response column into a categorical
if (classification) {
Vec respVec = f.vec(responseIdx);
f.replace(responseIdx, respVec.toCategorical()).remove();
DKV.put(f._key, f);
}
DRFModel model = null;
DRFModel modelFromCheckpoint = null;
DRFModel modelFinal = null;
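// Strategy: build a prior model, continue it from a checkpoint with extra trees, and build a fresh model with the same total number of trees; both must yield identical trees stored under different keys.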
try {
DRFModel.DRFParameters drfParams = new DRFModel.DRFParameters();
drfParams._model_id = Key.make("Initial model");
drfParams._train = f._key;
drfParams._response_column = f.name(responseIdx);
drfParams._ntrees = ntreesInPriorModel;
drfParams._seed = 42;
drfParams._max_depth = 10;
drfParams._score_each_iteration = true;
drfParams._sample_rate = sampleRateInPriorModel;
model = new DRF(drfParams).trainModel().get();
DRFModel.DRFParameters drfFromCheckpointParams = new DRFModel.DRFParameters();
drfFromCheckpointParams._model_id = Key.make("Model from checkpoint");
drfFromCheckpointParams._train = f._key;
drfFromCheckpointParams._response_column = f.name(responseIdx);
drfFromCheckpointParams._ntrees = ntreesInPriorModel + ntreesInNewModel;
drfFromCheckpointParams._seed = 42;
drfFromCheckpointParams._checkpoint = model._key;
drfFromCheckpointParams._score_each_iteration = true;
drfFromCheckpointParams._max_depth = 10;
drfFromCheckpointParams._sample_rate = sampleRateInNewModel;
modelFromCheckpoint = new DRF(drfFromCheckpointParams).trainModel().get();
// Compute a separate model containing the same number of trees as the model built from the checkpoint
DRFModel.DRFParameters drfFinalParams = new DRFModel.DRFParameters();
drfFinalParams._model_id = Key.make("Validation model");
drfFinalParams._train = f._key;
drfFinalParams._response_column = f.name(responseIdx);
drfFinalParams._ntrees = ntreesInPriorModel + ntreesInNewModel;
drfFinalParams._seed = 42;
drfFinalParams._score_each_iteration = true;
drfFinalParams._max_depth = 10;
modelFinal = new DRF(drfFinalParams).trainModel().get();
CompressedTree[][] treesFromCheckpoint = getTrees(modelFromCheckpoint);
CompressedTree[][] treesFromFinalModel = getTrees(modelFinal);
assertTreeEquals("The model created from checkpoint and corresponding model created from scratch should have the same trees!",
treesFromCheckpoint, treesFromFinalModel, true);
// Make sure we are not re-using trees
for (int tree = 0; tree < treesFromCheckpoint.length; tree++) {
for (int clazz = 0; clazz < treesFromCheckpoint[tree].length; clazz++) {
if (treesFromCheckpoint[tree][clazz] != null) { // Equality of the models was already verified above
CompressedTree a = treesFromCheckpoint[tree][clazz];
CompressedTree b = treesFromFinalModel[tree][clazz];
Assert.assertNotEquals(a._key, b._key);
}
}
}
} finally {
if (f!=null) f.delete();
if (model!=null) model.delete();
if (modelFromCheckpoint!=null) modelFromCheckpoint.delete();
if (modelFinal!=null) modelFinal.delete();
}
}
}
|
package io.enmasse.systemtest.bases.authz;
import io.enmasse.address.model.Address;
import io.enmasse.address.model.AddressBuilder;
import io.enmasse.address.model.AddressSpace;
import io.enmasse.systemtest.UserCredentials;
import io.enmasse.systemtest.amqp.AmqpClient;
import io.enmasse.systemtest.amqp.UnauthorizedAccessException;
import io.enmasse.systemtest.bases.TestBase;
import io.enmasse.systemtest.bases.shared.ITestBaseShared;
import io.enmasse.systemtest.logs.CustomLogger;
import io.enmasse.systemtest.model.address.AddressType;
import io.enmasse.systemtest.model.addressplan.DestinationPlan;
import io.enmasse.systemtest.model.addressspace.AddressSpaceType;
import io.enmasse.systemtest.utils.AddressUtils;
import io.enmasse.systemtest.utils.UserUtils;
import io.enmasse.user.model.v1.Operation;
import io.enmasse.user.model.v1.User;
import io.enmasse.user.model.v1.UserAuthorizationBuilder;
import io.vertx.proton.sasl.SaslSystemException;
import org.apache.qpid.proton.message.Message;
import org.slf4j.Logger;
import javax.security.sasl.AuthenticationException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
public abstract class AuthorizationTestBase extends TestBase implements ITestBaseShared {
private static Logger log = CustomLogger.getLogger();
private Address queue;
private Address topic;
private Address anycast;
private Address multicast;
private List<Address> addresses;
private void initAddresses() throws Exception {
queue = new AddressBuilder()
.withNewMetadata()
.withNamespace(getSharedAddressSpace().getMetadata().getNamespace())
.withName(AddressUtils.generateAddressMetadataName(getSharedAddressSpace(), "authz_queue"))
.endMetadata()
.withNewSpec()
.withType("queue")
.withAddress("authz_queue")
.withPlan(getDefaultPlan(AddressType.QUEUE))
.endSpec()
.build();
topic = new AddressBuilder()
.withNewMetadata()
.withNamespace(getSharedAddressSpace().getMetadata().getNamespace())
.withName(AddressUtils.generateAddressMetadataName(getSharedAddressSpace(), "authz-topic"))
.endMetadata()
.withNewSpec()
.withType("topic")
.withAddress("authz-topic")
.withPlan(getDefaultPlan(AddressType.TOPIC))
.endSpec()
.build();
anycast = new AddressBuilder()
.withNewMetadata()
.withNamespace(getSharedAddressSpace().getMetadata().getNamespace())
.withName(AddressUtils.generateAddressMetadataName(getSharedAddressSpace(), "authz-anycast"))
.endMetadata()
.withNewSpec()
.withType("anycast")
.withAddress("authz-anycast")
.withPlan(DestinationPlan.STANDARD_SMALL_ANYCAST)
.endSpec()
.build();
multicast = new AddressBuilder()
.withNewMetadata()
.withNamespace(getSharedAddressSpace().getMetadata().getNamespace())
.withName(AddressUtils.generateAddressMetadataName(getSharedAddressSpace(), "authz-multicast"))
.endMetadata()
.withNewSpec()
.withType("multicast")
.withAddress("authz-multicast")
.withPlan(DestinationPlan.STANDARD_SMALL_MULTICAST)
.endSpec()
.build();
addresses = new ArrayList<>();
addresses.add(queue);
addresses.add(topic);
if (getAddressSpaceType() == AddressSpaceType.STANDARD) {
addresses.add(anycast);
addresses.add(multicast);
}
resourcesManager.setAddresses(addresses.toArray(new Address[0]));
}
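// Verifies that a user granted 'send' (via wildcard or an explicit address list) can send, while users with no authorization or only 'recv' cannot.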
protected void doTestSendAuthz() throws Exception {
initAddresses();
UserCredentials allowedUser = new UserCredentials("sender", "senderPa55");
UserCredentials noAllowedUser = new UserCredentials("notallowedsender", "nobodyPa55");
resourcesManager.createOrUpdateUser(getSharedAddressSpace(), UserUtils.createUserResource(allowedUser)
.editSpec()
.withAuthorization(
Collections.singletonList(new UserAuthorizationBuilder().withAddresses("*").withOperations(Operation.send).build()))
.endSpec()
.done());
Thread.sleep(100);
assertSend(allowedUser);
resourcesManager.removeUser(getSharedAddressSpace(), allowedUser.getUsername());
resourcesManager.createOrUpdateUser(getSharedAddressSpace(), UserUtils.createUserResource(allowedUser)
.editSpec()
.withAuthorization(
Collections.singletonList(new UserAuthorizationBuilder()
.withAddresses(addresses.stream().map(address -> address.getSpec().getAddress()).collect(Collectors.toList()))
.withOperations(Operation.send).build()))
.endSpec()
.done());
Thread.sleep(100);
assertSend(allowedUser);
resourcesManager.removeUser(getSharedAddressSpace(), allowedUser.getUsername());
resourcesManager.createOrUpdateUser(getSharedAddressSpace(), UserUtils.createUserResource(noAllowedUser).done());
assertCannotSend(noAllowedUser);
resourcesManager.removeUser(getSharedAddressSpace(), noAllowedUser.getUsername());
resourcesManager.createOrUpdateUser(getSharedAddressSpace(), UserUtils.createUserResource(noAllowedUser)
.editSpec()
.withAuthorization(
Collections.singletonList(new UserAuthorizationBuilder()
.withAddresses("*")
.withOperations(Operation.recv).build()))
.endSpec()
.done());
assertCannotSend(noAllowedUser);
resourcesManager.removeUser(getSharedAddressSpace(), noAllowedUser.getUsername());
}
protected void doTestReceiveAuthz() throws Exception {
initAddresses();
UserCredentials allowedUser = new UserCredentials("receiver", "receiverPa55");
UserCredentials noAllowedUser = new UserCredentials("notallowedreceiver", "nobodyPa55");
resourcesManager.createOrUpdateUser(getSharedAddressSpace(), UserUtils.createUserResource(allowedUser)
.editSpec()
.withAuthorization(
Collections.singletonList(new UserAuthorizationBuilder().withAddresses("*").withOperations(Operation.recv).build()))
.endSpec()
.done());
assertReceive(allowedUser);
resourcesManager.removeUser(getSharedAddressSpace(), allowedUser.getUsername());
resourcesManager.createOrUpdateUser(getSharedAddressSpace(), UserUtils.createUserResource(allowedUser)
.editSpec()
.withAuthorization(
Collections.singletonList(new UserAuthorizationBuilder()
.withAddresses(addresses.stream().map(address -> address.getSpec().getAddress()).collect(Collectors.toList()))
.withOperations(Operation.recv).build()))
.endSpec()
.done());
assertReceive(allowedUser);
resourcesManager.removeUser(getSharedAddressSpace(), allowedUser.getUsername());
resourcesManager.createOrUpdateUser(getSharedAddressSpace(), UserUtils.createUserResource(noAllowedUser)
.editSpec()
.withAuthorization(
Collections.singletonList(new UserAuthorizationBuilder()
.withAddresses("*")
.withOperations(Operation.send).build()))
.endSpec()
.done());
assertCannotReceive(noAllowedUser);
resourcesManager.removeUser(getSharedAddressSpace(), noAllowedUser.getUsername());
}
protected void doTestUserPermissionAfterRemoveAuthz() throws Exception {
initAddresses();
UserCredentials user = new UserCredentials("pepa", "pepaPa55");
resourcesManager.createOrUpdateUser(getSharedAddressSpace(), UserUtils.createUserResource(user)
.editSpec()
.withAuthorization(
Collections.singletonList(new UserAuthorizationBuilder()
.withOperations(Operation.recv)
.withAddresses("*").build()))
.endSpec()
.done());
assertReceive(user);
resourcesManager.removeUser(getSharedAddressSpace(), user.getUsername());
Thread.sleep(5000);
resourcesManager.createOrUpdateUser(getSharedAddressSpace(), UserUtils.createUserResource(user)
.editSpec()
.withAuthorization(
Collections.singletonList(new UserAuthorizationBuilder()
.withOperations(Operation.recv)
.withAddresses("pepa_address").build()))
.endSpec()
.done());
assertCannotReceive(user);
resourcesManager.removeUser(getSharedAddressSpace(), user.getUsername());
}
protected void doTestSendAuthzWithWIldcards() throws Exception {
List<Address> addresses = getAddressesWildcard(getSharedAddressSpace());
List<User> users = createUsersWildcard(getSharedAddressSpace(), Operation.send);
resourcesManager.setAddresses(addresses.toArray(new Address[0]));
for (User user : users) {
for (Address destination : addresses) {
assertSendWildcard(user, destination);
}
resourcesManager.removeUser(getSharedAddressSpace(), user.getSpec().getUsername());
}
}
protected void doTestReceiveAuthzWithWIldcards() throws Exception {
List<Address> addresses = getAddressesWildcard(getSharedAddressSpace());
List<User> users = createUsersWildcard(getSharedAddressSpace(), Operation.recv);
resourcesManager.setAddresses(addresses.toArray(new Address[0]));
for (User user : users) {
for (Address destination : addresses) {
assertReceiveWildcard(user, destination);
}
resourcesManager.removeUser(getSharedAddressSpace(), user.getSpec().getUsername());
}
}
// Helper methods
private void assertSendWildcard(User user, Address destination) throws Exception {
List<String> addresses = user.getSpec().getAuthorization().stream()
.map(authz -> authz.getAddresses().stream())
.flatMap(Stream::distinct)
.collect(Collectors.toList());
UserCredentials credentials = UserUtils.getCredentialsFromUser(user);
if (addresses.stream().anyMatch(address -> destination.getSpec().getAddress().contains(address.replace("*", "")))) {
assertTrue(canSend(destination, credentials),
String.format("Authz failed, user %s cannot send message to destination %s", credentials,
destination.getSpec().getAddress()));
} else {
assertFalse(canSend(destination, credentials),
String.format("Authz failed, user %s can send message to destination %s", credentials,
destination.getSpec().getAddress()));
}
}
private void assertReceiveWildcard(User user, Address destination) throws Exception {
List<String> addresses = user.getSpec().getAuthorization().stream()
.map(authz -> authz.getAddresses().stream())
.flatMap(Stream::distinct)
.collect(Collectors.toList());
UserCredentials credentials = UserUtils.getCredentialsFromUser(user);
if (addresses.stream().anyMatch(address -> destination.getSpec().getAddress().contains(address.replace("*", "")))) {
assertTrue(canReceive(destination, credentials),
String.format("Authz failed, user %s cannot receive message from destination %s", credentials,
destination.getSpec().getAddress()));
} else {
assertFalse(canReceive(destination, credentials),
String.format("Authz failed, user %s can receive message from destination %s", credentials,
destination.getSpec().getAddress()));
}
}
private void assertSend(UserCredentials credentials) throws Exception {
log.info("Testing if client is authorized to send messages");
String message = String.format("Authz failed, user %s cannot send message", credentials);
assertTrue(canSend(queue, credentials), message);
assertTrue(canSend(topic, credentials), message);
if (getAddressSpaceType() == AddressSpaceType.STANDARD) {
assertTrue(canSend(multicast, credentials), message);
assertTrue(canSend(anycast, credentials), message);
}
}
private void assertCannotSend(UserCredentials credentials) throws Exception {
log.info("Testing if client is NOT authorized to send messages");
String message = String.format("Authz failed, user %s can send message", credentials);
assertFalse(canSend(queue, credentials), message);
assertFalse(canSend(topic, credentials), message);
if (getAddressSpaceType() == AddressSpaceType.STANDARD) {
assertFalse(canSend(multicast, credentials), message);
assertFalse(canSend(anycast, credentials), message);
}
}
private void assertReceive(UserCredentials credentials) throws Exception {
log.info("Testing if client is authorized to receive messages");
String message = String.format("Authz failed, user %s cannot receive message", credentials);
assertTrue(canReceive(queue, credentials), message);
assertTrue(canReceive(topic, credentials), message);
if (getAddressSpaceType() == AddressSpaceType.STANDARD) {
assertTrue(canReceive(multicast, credentials), message);
assertTrue(canReceive(anycast, credentials), message);
}
}
private void assertCannotReceive(UserCredentials credentials) throws Exception {
log.info("Testing if client is NOT authorized to receive messages");
String message = String.format("Authz failed, user %s can receive message", credentials);
assertFalse(canReceive(queue, credentials), message);
assertFalse(canReceive(topic, credentials), message);
if (getAddressSpaceType() == AddressSpaceType.STANDARD) {
assertFalse(canReceive(multicast, credentials), message);
assertFalse(canReceive(anycast, credentials), message);
}
}
private boolean canSend(Address destination, UserCredentials credentials) throws Exception {
logWithSeparator(log,
String.format("Try send message under user %s from %s %s", credentials, destination.getSpec().getType(), destination.getSpec().getAddress()),
String.format("***** Try to open sender client under user %s", credentials),
String.format("***** Try to open receiver client under user %s", defaultCredentials));
AmqpClient sender = createClient(destination, credentials);
AmqpClient receiver = createClient(destination, defaultCredentials);
logWithSeparator(log);
return canAuth(sender, receiver, destination, true);
}
private boolean canReceive(Address destination, UserCredentials credentials) throws Exception {
logWithSeparator(log,
String.format("Try receive message under user %s from %s %s", credentials, destination.getSpec().getType(), destination.getSpec().getAddress()),
String.format("***** Try to open sender client under user %s", defaultCredentials),
String.format("***** Try to open receiver client under user %s", credentials));
AmqpClient sender = createClient(destination, defaultCredentials);
AmqpClient receiver = createClient(destination, credentials);
logWithSeparator(log);
return canAuth(sender, receiver, destination, false);
}
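// Tries to pass a single message through the destination; authentication/authorization failures are reported as 'false', any other error is rethrown.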
private boolean canAuth(AmqpClient sender, AmqpClient receiver, Address destination, boolean checkSender) throws Exception {
try {
log.info("Staring receiver for " + destination.getSpec().getAddress());
Future<List<Message>> received = receiver.recvMessages(destination.getSpec().getAddress(), 1);
log.info("Staring sender for " + destination.getSpec().getAddress());
Future<Integer> sent = sender.sendMessages(destination.getSpec().getAddress(), Collections.singletonList("msg1"));
if (checkSender) {
int numSent = sent.get(1, TimeUnit.MINUTES);
log.info("Sent {}", numSent);
int numReceived = received.get(1, TimeUnit.MINUTES).size();
return numSent == numReceived;
} else {
int numReceived = received.get(1, TimeUnit.MINUTES).size();
int numSent = sent.get(1, TimeUnit.MINUTES);
return numSent == numReceived;
}
} catch (ExecutionException | SecurityException | UnauthorizedAccessException ex) {
Throwable cause = ex;
if (ex instanceof ExecutionException) {
cause = ex.getCause();
}
if (cause instanceof SecurityException || cause instanceof SaslSystemException || cause instanceof AuthenticationException || cause instanceof UnauthorizedAccessException) {
log.info("canAuth {} ({}): {}", destination.getSpec().getAddress(), destination.getSpec().getType(), ex.getMessage());
return false;
} else {
log.warn("canAuth {} ({}) exception", destination.getSpec().getAddress(), destination.getSpec().getType(), ex);
throw ex;
}
} finally {
sender.close();
receiver.close();
}
}
private AmqpClient createClient(Address dest, UserCredentials credentials) throws Exception {
AmqpClient client = null;
switch (dest.getSpec().getType()) {
case "queue":
case "anycast":
client = getAmqpClientFactory().createQueueClient(getSharedAddressSpace());
break;
case "topic":
client = getAmqpClientFactory().createTopicClient(getSharedAddressSpace());
break;
case "multicast":
client = getAmqpClientFactory().createBroadcastClient(getSharedAddressSpace());
break;
}
Objects.requireNonNull(client).getConnectOptions().setCredentials(credentials);
return client;
}
protected List<Address> getAddressesWildcard(AddressSpace addressspace) {
Address queue = new AddressBuilder()
.withNewMetadata()
.withNamespace(addressspace.getMetadata().getNamespace())
.withName(AddressUtils.generateAddressMetadataName(addressspace, "queue/1234"))
.endMetadata()
.withNewSpec()
.withType("queue")
.withAddress("queue/1234")
.withPlan(getDefaultPlan(AddressType.QUEUE))
.endSpec()
.build();
Address queue2 = new AddressBuilder()
.withNewMetadata()
.withNamespace(addressspace.getMetadata().getNamespace())
.withName(AddressUtils.generateAddressMetadataName(addressspace, "queue/ABCD"))
.endMetadata()
.withNewSpec()
.withType("queue")
.withAddress("queue/ABCD")
.withPlan(getDefaultPlan(AddressType.QUEUE))
.endSpec()
.build();
Address topic = new AddressBuilder()
.withNewMetadata()
.withNamespace(addressspace.getMetadata().getNamespace())
.withName(AddressUtils.generateAddressMetadataName(addressspace, "topic/2345"))
.endMetadata()
.withNewSpec()
.withType("topic")
.withAddress("topic/2345")
.withPlan(getDefaultPlan(AddressType.TOPIC))
.endSpec()
.build();
Address topic2 = new AddressBuilder()
.withNewMetadata()
.withNamespace(addressspace.getMetadata().getNamespace())
.withName(AddressUtils.generateAddressMetadataName(addressspace, "topic/ABCD"))
.endMetadata()
.withNewSpec()
.withType("topic")
.withAddress("topic/ABCD")
.withPlan(getDefaultPlan(AddressType.TOPIC))
.endSpec()
.build();
return Arrays.asList(queue, queue2, topic, topic2);
}
protected List<User> createUsersWildcard(AddressSpace addressSpace, Operation operation) throws
Exception {
List<User> users = new ArrayList<>();
users.add(UserUtils.createUserResource(new UserCredentials("user1", "password"))
.editSpec()
.withAuthorization(Collections.singletonList(new UserAuthorizationBuilder()
.withAddresses("*")
.withOperations(operation)
.build()))
.endSpec()
.done());
users.add(UserUtils.createUserResource(new UserCredentials("user2", "password"))
.editSpec()
.withAuthorization(Collections.singletonList(new UserAuthorizationBuilder()
|
package io.enmasse.systemtest.bases.clients;
import io.enmasse.systemtest.AddressType;
import io.enmasse.systemtest.Destination;
import io.enmasse.systemtest.clients.AbstractClient;
import io.enmasse.systemtest.clients.Argument;
import io.enmasse.systemtest.clients.ClientType;
import org.junit.Before;
import java.util.Arrays;
import java.util.concurrent.Future;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
public abstract class MsgPatternsTestBase extends ClientTestBase {
@Before
public void setUpCommonArguments() {
arguments.put(Argument.USERNAME, "test");
arguments.put(Argument.PASSWORD, "test");
arguments.put(Argument.LOG_MESSAGES, "json");
arguments.put(Argument.CONN_SSL, "true");
}
protected void doBasicMessageTest(AbstractClient sender, AbstractClient receiver) throws Exception {
clients.addAll(Arrays.asList(sender, receiver));
int expectedMsgCount = 10;
Destination dest = Destination.queue("message-basic" + ClientType.getAddressName(sender),
getDefaultPlan(AddressType.QUEUE));
setAddresses(sharedAddressSpace, dest);
arguments.put(Argument.BROKER, getRouteEndpoint(sharedAddressSpace).toString());
arguments.put(Argument.ADDRESS, dest.getAddress());
arguments.put(Argument.COUNT, Integer.toString(expectedMsgCount));
arguments.put(Argument.MSG_CONTENT, "msg no. %d");
sender.setArguments(arguments);
arguments.remove(Argument.MSG_CONTENT);
receiver.setArguments(arguments);
assertTrue("Sender failed, expected return code 0", sender.run());
assertTrue("Receiver failed, expected return code 0", receiver.run());
assertEquals(String.format("Expected %d sent messages", expectedMsgCount),
expectedMsgCount, sender.getMessages().size());
assertEquals(String.format("Expected %d received messages", expectedMsgCount),
expectedMsgCount, receiver.getMessages().size());
}
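// Two receivers compete on the same queue; the sender's messages should be split evenly between them (each receives half).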
protected void doRoundRobinReceiverTest(AbstractClient sender, AbstractClient receiver, AbstractClient receiver2)
throws Exception {
clients.addAll(Arrays.asList(sender, receiver, receiver2));
int expectedMsgCount = 10;
Destination dest = Destination.queue("receiver-round-robin" + ClientType.getAddressName(sender),
getDefaultPlan(AddressType.QUEUE));
setAddresses(sharedAddressSpace, dest);
arguments.put(Argument.BROKER, getRouteEndpoint(sharedAddressSpace).toString());
arguments.put(Argument.ADDRESS, dest.getAddress());
arguments.put(Argument.COUNT, Integer.toString(expectedMsgCount / 2));
arguments.put(Argument.TIMEOUT, "60");
receiver.setArguments(arguments);
receiver2.setArguments(arguments);
Future<Boolean> recResult = receiver.runAsync();
Future<Boolean> rec2Result = receiver2.runAsync();
arguments.put(Argument.COUNT, Integer.toString(expectedMsgCount));
arguments.put(Argument.MSG_CONTENT, "msg no. %d");
sender.setArguments(arguments);
assertTrue("Sender failed, expected return code 0", sender.run());
assertTrue("Receiver failed, expected return code 0", recResult.get());
assertTrue("Receiver failed, expected return code 0", rec2Result.get());
assertEquals(String.format("Expected %d sent messages", expectedMsgCount),
expectedMsgCount, sender.getMessages().size());
assertEquals(String.format("Expected %d received messages", expectedMsgCount / 2),
expectedMsgCount / 2, receiver.getMessages().size());
assertEquals(String.format("Expected %d sent messages", expectedMsgCount / 2),
expectedMsgCount / 2, receiver.getMessages().size());
}
protected void doTopicSubscribeTest(AbstractClient sender, AbstractClient subscriber, AbstractClient subscriber2,
boolean hasTopicPrefix) throws Exception {
clients.addAll(Arrays.asList(sender, subscriber, subscriber2));
int expectedMsgCount = 10;
Destination dest = Destination.topic("topic-subscribe" + ClientType.getAddressName(sender),
getDefaultPlan(AddressType.TOPIC));
setAddresses(sharedAddressSpace, dest);
arguments.put(Argument.BROKER, getRouteEndpoint(sharedAddressSpace).toString());
arguments.put(Argument.ADDRESS, getTopicPrefix(hasTopicPrefix) + dest.getAddress());
arguments.put(Argument.COUNT, Integer.toString(expectedMsgCount));
arguments.put(Argument.MSG_CONTENT, "msg no. %d");
arguments.put(Argument.TIMEOUT, "100");
sender.setArguments(arguments);
arguments.remove(Argument.MSG_CONTENT);
subscriber.setArguments(arguments);
subscriber2.setArguments(arguments);
Future<Boolean> recResult = subscriber.runAsync();
Future<Boolean> recResult2 = subscriber2.runAsync();
if (isBrokered(sharedAddressSpace)) {
waitForSubscribers(sharedAddressSpace, dest.getAddress(), 2);
} else {
waitForSubscribersConsole(sharedAddressSpace, dest, 2);
}
assertTrue("Producer failed, expected return code 0", sender.run());
assertTrue("Subscriber failed, expected return code 0", recResult.get());
assertTrue("Subscriber failed, expected return code 0", recResult2.get());
assertEquals(String.format("Expected %d sent messages", expectedMsgCount),
expectedMsgCount, sender.getMessages().size());
assertEquals(String.format("Expected %d received messages", expectedMsgCount),
expectedMsgCount, subscriber.getMessages().size());
assertEquals(String.format("Expected %d received messages", expectedMsgCount),
expectedMsgCount, subscriber2.getMessages().size());
}
protected void doMessageBrowseTest(AbstractClient sender, AbstractClient receiver_browse, AbstractClient receiver_receive)
throws Exception {
clients.addAll(Arrays.asList(sender, receiver_browse, receiver_receive));
int expectedMsgCount = 10;
Destination dest = Destination.queue("message-browse" + ClientType.getAddressName(sender),
getDefaultPlan(AddressType.QUEUE));
setAddresses(sharedAddressSpace, dest);
arguments.put(Argument.BROKER, getRouteEndpoint(sharedAddressSpace).toString());
arguments.put(Argument.ADDRESS, dest.getAddress());
arguments.put(Argument.COUNT, Integer.toString(expectedMsgCount));
arguments.put(Argument.MSG_CONTENT, "msg no. %d");
sender.setArguments(arguments);
arguments.remove(Argument.MSG_CONTENT);
arguments.put(Argument.RECV_BROWSE, "true");
receiver_browse.setArguments(arguments);
arguments.put(Argument.RECV_BROWSE, "false");
receiver_receive.setArguments(arguments);
assertTrue("Sender failed, expected return code 0", sender.run());
assertTrue("Browse receiver failed, expected return code 0", receiver_browse.run());
assertTrue("Receiver failed, expected return code 0", receiver_receive.run());
assertEquals(String.format("Expected %d sent messages", expectedMsgCount),
expectedMsgCount, sender.getMessages().size());
assertEquals(String.format("Expected %d browsed messages", expectedMsgCount),
expectedMsgCount, receiver_browse.getMessages().size());
assertEquals(String.format("Expected %d received messages", expectedMsgCount),
expectedMsgCount, receiver_receive.getMessages().size());
}
protected void doDrainQueueTest(AbstractClient sender, AbstractClient receiver) throws Exception {
Destination dest = Destination.queue("drain-queue" + ClientType.getAddressName(sender),
getDefaultPlan(AddressType.QUEUE));
setAddresses(sharedAddressSpace, dest);
clients.addAll(Arrays.asList(sender, receiver));
int expectedMsgCount = 50;
arguments.put(Argument.BROKER, getRouteEndpoint(sharedAddressSpace).toString());
arguments.put(Argument.ADDRESS, dest.getAddress());
arguments.put(Argument.COUNT, Integer.toString(expectedMsgCount));
arguments.put(Argument.MSG_CONTENT, "msg no. %d");
sender.setArguments(arguments);
arguments.remove(Argument.MSG_CONTENT);
arguments.put(Argument.COUNT, "0");
receiver.setArguments(arguments);
assertTrue("Sender failed, expected return code 0", sender.run());
assertTrue("Drain receiver failed, expected return code 0", receiver.run());
assertEquals(String.format("Expected %d sent messages", expectedMsgCount),
expectedMsgCount, sender.getMessages().size());
assertEquals(String.format("Expected %d received messages", expectedMsgCount),
expectedMsgCount, receiver.getMessages().size());
}
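// Sends messages carrying application properties, then checks that receivers using selectors (equality, numeric comparison, boolean AND/OR) only get the matching messages.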
protected void doMessageSelectorQueueTest(AbstractClient sender, AbstractClient receiver) throws Exception {
int expectedMsgCount = 10;
clients.addAll(Arrays.asList(sender, receiver));
Destination queue = Destination.queue("selector-queue" + ClientType.getAddressName(sender),
getDefaultPlan(AddressType.QUEUE));
setAddresses(sharedAddressSpace, queue);
arguments.put(Argument.BROKER, getRouteEndpoint(sharedAddressSpace).toString());
arguments.put(Argument.COUNT, Integer.toString(expectedMsgCount));
arguments.put(Argument.ADDRESS, queue.getAddress());
arguments.put(Argument.MSG_PROPERTY, "colour~red");
arguments.put(Argument.MSG_PROPERTY, "number~12.65");
arguments.put(Argument.MSG_PROPERTY, "a~true");
arguments.put(Argument.MSG_PROPERTY, "b~false");
arguments.put(Argument.MSG_CONTENT, "msg no. %d");
//send messages
sender.setArguments(arguments);
assertTrue("Sender failed, expected return code 0", sender.run());
assertEquals(String.format("Expected %d sent messages", expectedMsgCount),
expectedMsgCount, sender.getMessages().size());
arguments.remove(Argument.MSG_PROPERTY);
arguments.remove(Argument.MSG_CONTENT);
arguments.put(Argument.RECV_BROWSE, "true");
arguments.put(Argument.COUNT, "0");
//receiver with selector colour = red
arguments.put(Argument.SELECTOR, "colour = 'red'");
receiver.setArguments(arguments);
assertTrue("Receiver 'colour = red' failed, expected return code 0", receiver.run());
assertEquals(String.format("Expected %d received messages 'colour = red'", expectedMsgCount),
expectedMsgCount, receiver.getMessages().size());
//receiver with selector number > 12.5
arguments.put(Argument.SELECTOR, "number > 12.5");
receiver.setArguments(arguments);
assertTrue("Receiver 'number > 12.5' failed, expected return code 0", receiver.run());
assertEquals(String.format("Expected %d received messages 'colour = red'", expectedMsgCount),
expectedMsgCount, receiver.getMessages().size());
//receiver with selector a AND b
arguments.put(Argument.SELECTOR, "a AND b");
receiver.setArguments(arguments);
assertTrue("Receiver 'a AND b' failed, expected return code 0", receiver.run());
assertEquals(String.format("Expected %d received messages 'a AND b'", 0),
0, receiver.getMessages().size());
//receiver with selector a OR b
arguments.put(Argument.RECV_BROWSE, "false");
arguments.put(Argument.SELECTOR, "a OR b");
receiver.setArguments(arguments);
assertTrue("Receiver 'a OR b' failed, expected return code 0", receiver.run());
assertEquals(String.format("Expected %d received messages 'a OR b'", expectedMsgCount),
expectedMsgCount, receiver.getMessages().size());
}
protected void doMessageSelectorTopicTest(AbstractClient sender, AbstractClient subscriber,
AbstractClient subscriber2, AbstractClient subscriber3, boolean hasTopicPrefix) throws Exception {
clients.addAll(Arrays.asList(sender, subscriber, subscriber2, subscriber3));
int expectedMsgCount = 10;
Destination topic = Destination.topic("selector-topic" + ClientType.getAddressName(sender),
getDefaultPlan(AddressType.TOPIC));
setAddresses(sharedAddressSpace, topic);
arguments.put(Argument.BROKER, getRouteEndpoint(sharedAddressSpace).toString());
arguments.put(Argument.COUNT, Integer.toString(expectedMsgCount));
arguments.put(Argument.ADDRESS, getTopicPrefix(hasTopicPrefix) + topic.getAddress());
arguments.put(Argument.MSG_PROPERTY, "colour~red");
arguments.put(Argument.MSG_PROPERTY, "number~12.65");
arguments.put(Argument.MSG_PROPERTY, "a~true");
arguments.put(Argument.MSG_PROPERTY, "b~false");
arguments.put(Argument.TIMEOUT, "100");
arguments.put(Argument.MSG_CONTENT, "msg no. %d");
//set up sender
sender.setArguments(arguments);
arguments.remove(Argument.MSG_PROPERTY);
arguments.remove(Argument.MSG_CONTENT);
//set up subscriber1
arguments.put(Argument.SELECTOR, "colour = 'red'");
subscriber.setArguments(arguments);
//set up subscriber2
arguments.put(Argument.SELECTOR, "number > 12.5");
subscriber2.setArguments(arguments);
//set up subscriber3
arguments.put(Argument.SELECTOR, "a AND b");
subscriber3.setArguments(arguments);
Future<Boolean> result1 = subscriber.runAsync();
Future<Boolean> result2 = subscriber2.runAsync();
Future<Boolean> result3 = subscriber3.runAsync();
if (isBrokered(sharedAddressSpace)) {
waitForSubscribers(sharedAddressSpace, topic.getAddress(), 3);
} else {
waitForSubscribersConsole(sharedAddressSpace, topic, 3);
}
assertTrue("Sender failed, expected return code 0", sender.run());
assertTrue("Receiver 'colour = red' failed, expected return code 0", result1.get());
assertTrue("Receiver 'number > 12.5' failed, expected return code 0", result2.get());
assertTrue("Receiver 'a AND b' failed, expected return code 0", result3.get());
assertEquals(String.format("Expected %d sent messages", expectedMsgCount),
expectedMsgCount, sender.getMessages().size());
assertEquals(String.format("Expected %d received messages 'colour = red'", expectedMsgCount),
expectedMsgCount, subscriber.getMessages().size());
assertEquals(String.format("Expected %d received messages 'number > 12.5'", expectedMsgCount),
expectedMsgCount, subscriber2.getMessages().size());
assertEquals(String.format("Expected %d received messages 'a AND b'", 0),
0, subscriber3.getMessages().size());
}
}
|
package codegen;
import java.io.File;
import java.io.FileFilter;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.apache.maven.plugins.annotations.Component;
import org.apache.maven.plugins.annotations.LifecyclePhase;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.Parameter;
import org.sonatype.plexus.build.incremental.BuildContext;
@Mojo(name = "codegen", defaultPhase = LifecyclePhase.GENERATE_SOURCES)
public class CodeGenMojo extends AbstractMojo {
@Component
private BuildContext context;
@Parameter(property = "configFile", required = true)
private File configFile;
@Parameter(defaultValue = "${project.build.directory}/generated-sources/")
private File srcDirectory;
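// Entry point: validates the generated-sources directory, picks the files to process (all files, or only changed ones on incremental builds), runs the code generator on them, and refreshes the results in the build context.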
public void execute() throws MojoExecutionException, MojoFailureException {
getLog().info("Running CodeGen...");
if (!srcDirectory.exists()) {
throw new MojoExecutionException(String.format("%s does not exist", srcDirectory.getName()));
}
if (!srcDirectory.isDirectory()) {
throw new MojoExecutionException(String.format("%s is not a directory", srcDirectory.getName()));
}
final List<File> files = listFilesForFolder(srcDirectory);
final List<File> filesForProcessing = getFilesForProcessing(files);
if (filesForProcessing.isEmpty()) {
getLog().info("No files have changed! Not processing...");
return;
}
List<File> parsed;
try {
final CodeGenerator generator = new CodeGenerator(configFile);
parsed = generator.parse(filesForProcessing);
} catch (final ParseExceptions pe) {
throw new MojoFailureException("Couldn't parse files: " + pe.getExceptions(), pe);
} catch (final Exception e) {
throw new MojoFailureException("Couldn't process files", e);
}
printParsedFiles(parsed);
}
private List<File> getFilesForProcessing(List<File> files) {
final List<File> filesForProcessing = new ArrayList<File>();
if (context.isIncremental()) {
if (context.hasDelta(configFile)) {
getLog().info("Config file " + configFile + " has been changed, refreshing files...");
filesForProcessing.addAll(listFiles(files));
} else {
filesForProcessing.addAll(filterFilesOnBuildContext(files));
}
} else {
filesForProcessing.addAll(listFiles(files));
}
return filesForProcessing;
}
private Collection<? extends File> listFiles(List<File> files) {
for (File file : files) {
getLog().info("Processing " + file);
}
return files;
}
private void printParsedFiles(List<File> parsed) {
for (File file : parsed) {
getLog().info("Processed " + file);
context.refresh(file);
}
}
private List<File> filterFilesOnBuildContext(List<File> files) {
List<File> filesForProcessing = new ArrayList<File>();
for (File file : files) {
if (context.hasDelta(file)) {
getLog().info("Processing " + file);
filesForProcessing.add(file);
}
}
return filesForProcessing;
}
private List<File> listFilesForFolder(File srcDirectory2) {
List<File> files = new ArrayList<File>();
for (File file : srcDirectory2.listFiles(getFileFilter())) {
if (file.isDirectory()) {
files.addAll(listFilesForFolder(file));
} else {
getLog().debug("Adding file to content " + file);
files.add(file);
}
}
return files;
}
private FileFilter getFileFilter() {
return new FileFilter() {
@Override
public boolean accept(File pathname) {
// accept directories (to recurse into) and Java source files
return pathname.isDirectory() || pathname.getName().endsWith(".java");
}
};
}
}
|
package com.github.andriell.gui;
import com.github.andriell.db.ProductDao;
import org.hibernate.criterion.Junction;
import org.hibernate.criterion.Restrictions;
import org.springframework.beans.factory.InitializingBean;
import javax.swing.*;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
public class ProductsWorkArea implements WorkArea, InitializingBean {
Font font = new Font("Segoe UI", Font.PLAIN, 10);
Insets insets = new Insets(2, 2, 2, 2);
StringBuilder query = new StringBuilder();
ProductDao productDao;
private String name = "Продукты";
private JPanel rootPanel;
private JPanel paginationPanel;
private JPanel dataPanel;
private JPanel filterPanel;
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public JPanel getRootPanel() {
return rootPanel;
}
public void afterPropertiesSet() throws Exception { }
private void createUIComponents() {
filterPanel = new Filter(null);
}
public void setProductDao(ProductDao productDao) {
this.productDao = productDao;
}
class Filter extends JPanel {
Filter rootPanel;
Filter parent;
JPanel northPanel;
JPanel conditionPanel;
JPanel filtersPanel;
JButton groupButton;
JButton conditionButton;
JButton closeButton;
JComboBox conditionComboBox;
public Filter(Filter p) {
rootPanel = this;
this.parent = p;
setLayout(new BorderLayout());
setBorder(BorderFactory.createCompoundBorder(BorderFactory.createEmptyBorder(5, 5, 5, 5), BorderFactory.createLineBorder(Color.LIGHT_GRAY)));
northPanel = new JPanel(new FlowLayout(FlowLayout.LEFT));
add(northPanel, BorderLayout.NORTH);
groupButton = new JButton("Группа");
groupButton.setFont(font);
groupButton.setMargin(insets);
groupButton.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
filtersPanel.add(new Filter(rootPanel));
filtersPanel.updateUI();
}
});
conditionButton = new JButton("Условие");
conditionButton.setFont(font);
conditionButton.setMargin(insets);
conditionButton.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
conditionPanel.add(new Condition(rootPanel));
conditionPanel.updateUI();
}
});
conditionComboBox = new JComboBox();
conditionComboBox.setFont(font);
conditionComboBox.addItem("AND");
conditionComboBox.addItem("OR");
if (p == null) {
closeButton = new JButton("Query");
closeButton.setFont(font);
closeButton.setMargin(insets);
closeButton.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
Filter filter = (Filter) filterPanel;
Junction junction = filter.render();
System.out.println(junction);
}
});
} else {
closeButton = new JButton("X");
closeButton.setFont(font);
closeButton.setMargin(insets);
closeButton.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
parent.filtersPanel.remove(rootPanel);
parent.filtersPanel.updateUI();
}
});
}
northPanel.add(groupButton);
northPanel.add(conditionButton);
northPanel.add(conditionComboBox);
northPanel.add(closeButton);
JPanel centerPanel = new JPanel();
centerPanel.setLayout(new BoxLayout(centerPanel, BoxLayout.PAGE_AXIS));
filtersPanel = new JPanel();
filtersPanel.setLayout(new BoxLayout(filtersPanel, BoxLayout.PAGE_AXIS));
centerPanel.add(filtersPanel);
conditionPanel = new JPanel();
conditionPanel.setLayout(new BoxLayout(conditionPanel, BoxLayout.PAGE_AXIS));
centerPanel.add(conditionPanel);
add(centerPanel, BorderLayout.CENTER);
}
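// Recursively assembles a Hibernate Junction (AND/OR) from the nested Filter groups and their Condition rows.
// Hypothetical usage sketch (not part of this class): session.createCriteria(Product.class).add(filter.render()).list()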
public Junction render() {
Junction junction;
if ("AND".equals(conditionComboBox.getSelectedItem())) {
junction = Restrictions.and();
} else {
junction = Restrictions.or();
}
Component[] components = filtersPanel.getComponents();
if (components != null) {
for (Component component: components) {
if (component instanceof Filter) {
Filter filter = (Filter) component;
junction.add(filter.render());
}
}
}
components = conditionPanel.getComponents();
if (components != null) {
for (Component component: components) {
if (component instanceof Condition) {
Condition condition = (Condition) component;
condition.render(junction);
}
}
}
return junction;
}
}
class Condition extends JPanel {
JPanel rootPanel;
JComboBox column;
JComboBox condition;
JTextField value;
JButton close;
Filter parent;
public Condition(Filter p) {
rootPanel = this;
this.parent = p;
setLayout(new FlowLayout(FlowLayout.LEFT));
column = new JComboBox();
column.setFont(font);
String[] fields = productDao.searchFields();
for (String f:fields) {
column.addItem(f);
}
add(column);
condition = new JComboBox();
condition.setFont(font);
condition.addItem("==");
condition.addItem("!=");
condition.addItem(">");
condition.addItem(">=");
condition.addItem("<");
condition.addItem("<=");
condition.addItem("LIKE");
condition.addItem("IN");
condition.addItem("NOT IN");
condition.addItem("RANGE");
condition.addItem("NULL");
condition.addItem("NOT NULL");
add(condition);
value = new JTextField();
value.setColumns(20);
value.setFont(font);
value.setMargin(insets);
add(value);
close = new JButton("X");
close.setFont(font);
close.setMargin(insets);
close.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
parent.conditionPanel.remove(rootPanel);
parent.conditionPanel.updateUI();
}
});
add(close);
}
public void render(Junction junction) {
String[] s;
String val = value.getText();
String cond = condition.getSelectedItem().toString();
String col = column.getSelectedItem().toString();
if ("==".equals(cond)) {
junction.add(Restrictions.eq(col, val));
} else if ("!=".equals(cond)) {
junction.add(Restrictions.ne(col, val));
} else if (">".equals(cond)) {
junction.add(Restrictions.gt(col, val));
} else if (">=".equals(cond)) {
junction.add(Restrictions.ge(col, val));
} else if ("<".equals(cond)) {
junction.add(Restrictions.lt(col, val));
} else if ("<=".equals(cond)) {
junction.add(Restrictions.le(col, val));
} else if ("LIKE".equals(cond)) {
junction.add(Restrictions.like(col, val));
} else if ("IN".equals(cond)) {
s = val.split(";");
junction.add(Restrictions.in(col, s));
} else if ("NOT IN".equals(cond)) {
s = val.split(";");
junction.add(Restrictions.not(Restrictions.in(col, s)));
} else if ("RANGE".equals(cond) && val != null) {
s = val.split(";", 2);
if (s.length == 2) {
junction.add(Restrictions.between(col, s[0].trim(), s[1].trim()));
}
} else if ("NULL".equals(cond)) {
junction.add(Restrictions.isNull(col));
} else if ("NOT NULL".equals(cond)) {
junction.add(Restrictions.isNotNull(col));
}
}
}
}
|
package com.jetbrains.env.python;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;
import com.jetbrains.env.python.console.PyConsoleTask;
import com.jetbrains.env.python.debug.PyEnvTestCase;
import org.junit.Assert;
import java.util.Collections;
import java.util.List;
import java.util.Set;
/**
* @author traff
*/
public class PythonConsoleTest extends PyEnvTestCase {
public void testConsolePrint() throws Exception {
runPythonTest(new PyConsoleTask() {
@Override
public void testing() throws Exception {
exec("x = 96");
exec("x += 1");
exec("print(1)");
exec("print(x)");
waitForOutput("97");
}
});
}
public void testExecuteMultiline() throws Exception { //PY-4329
runPythonTest(new PyConsoleTask() {
@Override
public void testing() throws Exception {
exec("if True:\n" +
" x=1\n" +
"y=x+100\n" +
"for i in range(1):\n" +
" print(y)\n");
waitForOutput("101");
}
@Override
public Set<String> getTags() {
return Sets.newHashSet("jython");
}
});
}
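// Starts a long-running loop without waiting for it, verifies the console is busy, interrupts it, and waits until the console is ready again.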
public void testInterruptAsync() throws Exception {
runPythonTest(new PyConsoleTask() {
@Override
public void testing() throws Exception {
exec("import time");
execNoWait("for i in range(10000):\n" +
" print(i)\n" +
" time.sleep(0.1)");
waitForOutput("3\n4\n5");
Assert.assertFalse(canExecuteNow());
interrupt();
waitForFinish();
waitForReady();
}
@Override
public Set<String> getTags() {
return new ImmutableSet.Builder<String>().addAll(super.getTags()).add("-iron").build();
}
});
}
public void testLineByLineInput() throws Exception {
runPythonTest(new PyConsoleTask() {
@Override
public void testing() throws Exception {
exec("x = 96");
exec("x +=1");
exec("if True:");
exec("");
exec(" print(x)");
exec("");
exec("");
waitForOutput("97");
}
});
}
public void testVariablesView() throws Exception {
runPythonTest(new PyConsoleTask() {
@Override
public void testing() throws Exception {
exec("x = 1");
exec("print(x)");
waitForOutput("1");
assertTrue("Variable has wrong value",
hasValue("x", "1"));
}
});
}
public void testCompoundVariable() throws Exception {
runPythonTest(new PyConsoleTask() {
@Override
public void testing() throws Exception {
exec("x = [1, 2, 3]");
exec("print(x)");
waitForOutput("[1, 2, 3]");
List<String> values = getCompoundValueChildren(getValue("x"));
Collections.sort(values);
assertContainsElements(values, "1", "2", "3", "3");
}
});
}
public void testChangeVariable() throws Exception {
runPythonTest(new PyConsoleTask() {
@Override
public void testing() throws Exception {
exec("x = 1");
exec("print(x)");
waitForOutput("1");
setValue("x", "2");
assertTrue("Variable has wrong value",
hasValue("x", "2"));
}
});
}
}
|
package com.github.therapi.apidoc;
import static com.github.therapi.core.internal.LangHelper.index;
import static org.apache.commons.lang3.StringUtils.removeStart;
import static org.apache.commons.lang3.StringUtils.substringAfter;
import static org.apache.commons.lang3.StringUtils.substringBefore;
import static org.apache.commons.lang3.StringUtils.substringBetween;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.function.Supplier;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectWriter;
import com.github.therapi.core.MethodRegistry;
import com.github.therapi.core.internal.MethodDefinition;
import com.github.therapi.core.internal.ParameterDefinition;
import com.github.therapi.runtimejavadoc.ClassJavadoc;
import com.github.therapi.runtimejavadoc.Comment;
import com.github.therapi.runtimejavadoc.CommentFormatter;
import com.github.therapi.runtimejavadoc.MethodJavadoc;
import com.github.therapi.runtimejavadoc.ParamJavadoc;
import com.github.therapi.runtimejavadoc.RuntimeJavadocReader;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.SortedSetMultimap;
import com.google.common.collect.TreeMultimap;
public class ApiDocProvider {
private final CommentFormatter commentFormatter = new CommentFormatter();
private final RuntimeJavadocReader javadocReader = new RuntimeJavadocReader();
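/**
* Builds the documentation tree for every method in the given registry: methods are grouped
* by namespace, combined with their runtime Javadoc when it is available, and each parameter
* is described with its JSON type, pretty-printed default value and Javadoc comment.
*/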
public List<TherapiNamespaceDoc> getDocumentation(MethodRegistry registry) throws IOException {
final ObjectWriter prettyWriter = registry.getObjectMapper().writerWithDefaultPrettyPrinter();
final List<TherapiNamespaceDoc> namespaces = new ArrayList<>();
final SortedSetMultimap<String, MethodDefinition> methodDefinitionsByNamespace = TreeMultimap.create(
Comparator.<String>naturalOrder(), Comparator.comparing(MethodDefinition::getUnqualifiedName));
for (MethodDefinition mdef : registry.getMethods()) {
methodDefinitionsByNamespace.put(mdef.getNamespace().orElse(""), mdef);
}
for (String namespaceName : methodDefinitionsByNamespace.keySet()) {
final TherapiNamespaceDoc nsDoc = new TherapiNamespaceDoc();
nsDoc.setName(namespaceName);
final List<TherapiMethodDoc> methods = new ArrayList<>();
for (MethodDefinition mdef : methodDefinitionsByNamespace.get(namespaceName)) {
final TherapiMethodDoc mdoc = new TherapiMethodDoc();
mdoc.setName(mdef.getUnqualifiedName());
final Optional<MethodJavadoc> methodJavadocOptional = getJavadoc(mdef);
final Map<String, ParamJavadoc> javadocsByParamName = methodJavadocOptional.isPresent()
? index(methodJavadocOptional.get().getParams(), ParamJavadoc::getName)
: ImmutableMap.<String, ParamJavadoc>of();
if (methodJavadocOptional.isPresent()) {
mdoc.setDescription(render(methodJavadocOptional.get().getComment()));
mdoc.setReturns(render(methodJavadocOptional.get().getReturns()));
}
final List<TherapiParamDoc> paramDocs = new ArrayList<>();
for (ParameterDefinition pdef : mdef.getParameters()) {
final TherapiParamDoc pdoc = new TherapiParamDoc();
pdoc.setName(pdef.getName());
pdoc.setType(toJsonType(pdef.getType()));
Optional<Supplier<?>> defaultSupplier = pdef.getDefaultValueSupplier();
if (defaultSupplier.isPresent()) {
pdoc.setDefaultValue(prettyWriter.writeValueAsString(defaultSupplier.get().get()));
}
ParamJavadoc paramJavadoc = javadocsByParamName.get(pdef.getName());
if (paramJavadoc != null) {
pdoc.setDescription(render(paramJavadoc.getComment()));
}
paramDocs.add(pdoc);
}
mdoc.setParams(paramDocs);
methods.add(mdoc);
}
nsDoc.setMethods(methods);
namespaces.add(nsDoc);
}
return namespaces;
}
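/**
* Maps a Java type name to the JSON-oriented name used in the generated documentation,
* e.g. {@code java.util.List<java.lang.String>} becomes {@code array<string>} and a Guava
* {@code Multimap} becomes {@code map<keyType, array<valueType>>}.
*/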
protected String toJsonType(TypeReference typeRef) {
String typeName = typeRef.getType().toString();
typeName = removeStart(typeName, "class ");
typeName = removeStart(typeName, "interface ");
if (typeName.equals("int") || typeName.equals("long")) {
return "integer";
}
if (typeName.equals("float") || typeName.equals("double")) {
return "number";
}
typeName = typeName.replace("java.lang.Object", "any");
typeName = typeName.replace("java.lang.String", "string");
typeName = typeName.replace("java.lang.Integer", "integer");
typeName = typeName.replace("java.lang.Long", "integer");
typeName = typeName.replace("java.lang.Float", "number");
typeName = typeName.replace("java.lang.Double", "number");
typeName = typeName.replace("java.util.Set", "array");
typeName = typeName.replace("java.util.List", "array");
typeName = typeName.replace("java.util.Collection", "array");
typeName = typeName.replace("java.util.Map", "map");
typeName = typeName.replace("java.util.Optional", "optional");
typeName = typeName.replace("com.google.common.base.Optional", "optional");
if (typeName.startsWith("com.google.common.collect.Multimap")) {
String params = substringBetween(typeName, "<", ">");
String keyType = substringBefore(params, ",").trim();
String valueType = substringAfter(params, ",").trim();
typeName = "map<" + keyType + ", array<" + valueType + ">>";
}
return typeName;
}
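/**
* Looks up the runtime Javadoc of the method's declaring class and returns the entry whose
* name matches the method name, if any. Matching is by simple name only, so overloaded
* methods share a single Javadoc entry.
*/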
public Optional<MethodJavadoc> getJavadoc(MethodDefinition m) throws IOException {
ClassJavadoc classJavadoc = javadocReader.getDocumentation(m.getMethod().getDeclaringClass().getName());
if (classJavadoc == null) {
return Optional.empty();
}
for (MethodJavadoc methodJavadoc : classJavadoc.getMethods()) {
if (methodJavadoc.getName().equals(m.getMethod().getName())) {
return Optional.of(methodJavadoc);
}
}
return Optional.empty();
}
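/**
* Like {@link #getJavadoc(MethodDefinition)}, but wraps the result in a {@link TherapiMethodDoc}
* holding only the qualified method name and the rendered description.
*/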
public Optional<TherapiMethodDoc> getMethodDoc(MethodDefinition m) throws IOException {
ClassJavadoc classJavadoc = javadocReader.getDocumentation(m.getMethod().getDeclaringClass().getName());
if (classJavadoc == null) {
return Optional.empty();
}
for (MethodJavadoc methodJavadoc : classJavadoc.getMethods()) {
if (methodJavadoc.getName().equals(m.getMethod().getName())) {
TherapiMethodDoc doc = new TherapiMethodDoc();
doc.setName(m.getQualifiedName("."));
doc.setDescription(render(methodJavadoc.getComment()));
return Optional.of(doc);
}
}
return Optional.empty();
}
protected String render(Comment comment) {
return commentFormatter.format(comment);
}
}
|
package com.metamx.druid.loading;
import com.google.common.base.Throwables;
import com.google.common.collect.Lists;
import com.google.inject.Inject;
import com.metamx.common.ISE;
import com.metamx.common.MapUtils;
import com.metamx.common.logger.Logger;
import com.metamx.druid.client.DataSegment;
import com.metamx.druid.db.DbConnectorConfig;
import org.codehaus.jackson.map.ObjectMapper;
import org.jets3t.service.ServiceException;
import org.jets3t.service.impl.rest.httpclient.RestS3Service;
import org.joda.time.Interval;
import org.skife.jdbi.v2.DBI;
import org.skife.jdbi.v2.FoldController;
import org.skife.jdbi.v2.Folder3;
import org.skife.jdbi.v2.Handle;
import org.skife.jdbi.v2.StatementContext;
import org.skife.jdbi.v2.tweak.HandleCallback;
import java.sql.SQLException;
import java.util.List;
import java.util.Map;
public class S3SegmentKiller implements SegmentKiller
{
private static final Logger log = new Logger(S3SegmentKiller.class);
private final RestS3Service s3Client;
private final DBI dbi;
private final DbConnectorConfig config;
private final ObjectMapper jsonMapper;
@Inject
public S3SegmentKiller(
RestS3Service s3Client,
DBI dbi,
DbConnectorConfig config,
ObjectMapper jsonMapper
)
{
this.s3Client = s3Client;
this.dbi = dbi;
this.config = config;
this.jsonMapper = jsonMapper;
}
@Override
public List<DataSegment> kill(final String datasource, final Interval interval) throws ServiceException
{
// TODO -- Awkward for workers to use the DB!
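// Select every unused (used = 0) segment of this datasource whose interval lies within the
// requested interval; each payload row is deserialized back into a DataSegment.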
List<DataSegment> matchingSegments = dbi.withHandle(
new HandleCallback<List<DataSegment>>()
{
@Override
public List<DataSegment> withHandle(Handle handle) throws Exception
{
return handle.createQuery(
String.format(
"SELECT payload FROM %s WHERE dataSource = :dataSource and start >= :start and end <= :end and used = 0",
config.getSegmentTable()
)
)
.bind("dataSource", datasource)
.bind("start", interval.getStart().toString())
.bind("end", interval.getEnd().toString())
.fold(
Lists.<DataSegment>newArrayList(),
new Folder3<List<DataSegment>, Map<String, Object>>()
{
@Override
public List<DataSegment> fold(
List<DataSegment> accumulator,
Map<String, Object> stringObjectMap,
FoldController foldController,
StatementContext statementContext
) throws SQLException
{
try {
DataSegment segment = jsonMapper.readValue(
(String) stringObjectMap.get("payload"),
DataSegment.class
);
accumulator.add(segment);
return accumulator;
}
catch (Exception e) {
throw Throwables.propagate(e);
}
}
}
);
}
}
);
log.info("Found %,d segments for %s for interval %s.", matchingSegments.size(), datasource, interval);
for (final DataSegment segment : matchingSegments) {
// Remove from S3
Map<String, Object> loadSpec = segment.getLoadSpec();
String s3Bucket = MapUtils.getString(loadSpec, "bucket");
String s3Path = MapUtils.getString(loadSpec, "key");
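// The descriptor file lives next to the index file in S3, under the same prefix.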
String s3DescriptorPath = s3Path.substring(0, s3Path.lastIndexOf("/")) + "/descriptor.json";
if (s3Client.isObjectInBucket(s3Bucket, s3Path)) {
log.info("Removing index file[s3://%s/%s] from s3!", s3Bucket, s3Path);
s3Client.deleteObject(s3Bucket, s3Path);
}
if (s3Client.isObjectInBucket(s3Bucket, s3DescriptorPath)) {
log.info("Removing descriptor file[s3://%s/%s] from s3!", s3Bucket, s3DescriptorPath);
s3Client.deleteObject(s3Bucket, s3DescriptorPath);
}
}
return matchingSegments;
}
}
|
package org.wildfly.core.testrunner;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import javax.inject.Inject;
import org.jboss.as.controller.client.ModelControllerClient;
import org.jboss.dmr.ModelNode;
import org.junit.runner.Result;
import org.junit.runner.notification.RunListener;
import org.junit.runner.notification.RunNotifier;
import org.junit.runners.BlockJUnit4ClassRunner;
import org.junit.runners.model.FrameworkField;
import org.junit.runners.model.InitializationError;
import org.junit.runners.model.TestClass;
/**
* A lightweight test runner for running management based tests
*
* @author Stuart Douglas
*/
public class WildflyTestRunner extends BlockJUnit4ClassRunner {
private final ServerController controller = new ServerController();
private final boolean automaticServerControl;
private final List<ServerSetupTask> serverSetupTasks = new LinkedList<>();
/**
* Creates a BlockJUnit4ClassRunner to run {@code klass}
*
* @throws org.junit.runners.model.InitializationError if the test class is malformed.
*/
public WildflyTestRunner(Class<?> klass) throws InitializationError {
super(klass);
if (klass.isAnnotationPresent(ServerControl.class)) {
ServerControl serverControl = klass.getAnnotation(ServerControl.class);
automaticServerControl = !serverControl.manual();
} else {
automaticServerControl = true;
}
startServerIfRequired();
doInject(getTestClass(), null);
prepareSetupTasks(getTestClass());
}
private void doInject(TestClass klass, Object instance) {
try {
for (FrameworkField frameworkField : klass.getAnnotatedFields(Inject.class)) {
Field field = frameworkField.getField();
// Inject static fields before the test run (instance == null) and instance fields afterwards,
// so the injected client always matches the current state of the server.
if ((instance == null && Modifier.isStatic(field.getModifiers())) || instance != null) {
field.setAccessible(true);
if (field.getType() == ManagementClient.class && controller.isStarted()) {
field.set(instance, controller.getClient());
} else if (field.getType() == ModelControllerClient.class && controller.isStarted()) {
field.set(instance, controller.getClient().getControllerClient());
} else if (field.getType() == ServerController.class) {
field.set(instance, controller);
}
}
}
} catch (Exception e) {
throw new RuntimeException("Failed to inject", e);
}
}
@Override
protected Object createTest() throws Exception {
Object res = super.createTest();
doInject(getTestClass(), res);
return res;
}
@Override
public void run(final RunNotifier notifier) {
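// When the server lifecycle is managed automatically, stop it once the whole test run has finished.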
notifier.addListener(new RunListener() {
@Override
public void testRunFinished(Result result) throws Exception {
super.testRunFinished(result);
if (automaticServerControl) {
controller.stop();
}
}
});
startServerIfRequired();
if (!serverSetupTasks.isEmpty() && !automaticServerControl) {
throw new RuntimeException("Can't run setup tasks with manual server control");
}
if (automaticServerControl) {
runSetupTasks();
}
super.run(notifier);
if (automaticServerControl) {
runTearDownTasks();
}
}
private void runSetupTasks() {
for (ServerSetupTask task : serverSetupTasks) {
try {
task.setup(controller.getClient());
} catch (Exception e) {
throw new RuntimeException(String.format("Could not run setup task '%s'", task), e);
}
}
}
private void runTearDownTasks() {
List<ServerSetupTask> reverseServerSetupTasks = new LinkedList<>(serverSetupTasks);
Collections.reverse(reverseServerSetupTasks);
for (ServerSetupTask task : reverseServerSetupTasks) {
try {
task.tearDown(controller.getClient());
} catch (Exception e) {
throw new RuntimeException(String.format("Could not run tear down task '%s'", task), e);
}
}
checkServerState();
}
private void prepareSetupTasks(TestClass klass) throws InitializationError {
try {
if (klass.getJavaClass().isAnnotationPresent(ServerSetup.class)) {
ServerSetup serverSetup = klass.getAnnotation(ServerSetup.class);
for (Class<? extends ServerSetupTask> clazz : serverSetup.value()) {
Constructor<? extends ServerSetupTask> ctor = clazz.getDeclaredConstructor();
ctor.setAccessible(true);
serverSetupTasks.add(ctor.newInstance());
}
}
} catch (Exception e) {
throw new InitializationError(e);
}
}
private void startServerIfRequired() {
if (automaticServerControl) {
controller.start();
}
}
private void checkServerState() {
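// Read the server-state attribute from the management model; tear-down must leave the
// server in the 'running' state.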
ModelNode op = new ModelNode();
op.get("operation").set("read-attribute");
op.get("name").set("server-state");
try {
ModelNode result = controller.getClient().executeForResult(op);
if (!"running".equalsIgnoreCase(result.asString())) {
throw new RuntimeException(String.format("Server state is '%s' following test completion; tests must complete with the server in 'running' state", result.asString()));
}
} catch (UnsuccessfulOperationException e) {
throw new RuntimeException("Failed checking server-state", e);
}
}
}
|
package reactor.kafka;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.reactivestreams.Subscriber;
import org.reactivestreams.Subscription;
import reactor.fn.Consumer;
import reactor.fn.Function;
import reactor.fn.tuple.Tuple2;
import java.util.Properties;
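/**
* A Reactive Streams {@link Subscriber} that publishes every incoming key/value tuple to a
* single Kafka topic. The supplied partitioner maps each key to a partition, errors are
* handed to the supplied error consumer, and the producer is closed on completion.
*/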
public class KafkaSubscriber<K, V> implements Subscriber<Tuple2<K, V>> {
private final KafkaProducer<K, V> kafkaProducer;
private final String topic;
private final Function<K, Integer> partitioner;
private final Consumer<Throwable> errorConsumer;
private volatile Subscription subscription;
public KafkaSubscriber(Properties producerProperties,
String topic,
Function<K, Integer> partitioner,
Consumer<Throwable> errorConsumer) {
this.kafkaProducer = new KafkaProducer<>(producerProperties);
this.topic = topic;
this.partitioner = partitioner;
this.errorConsumer = errorConsumer;
}
@Override
public void onSubscribe(Subscription subscription) {
// Store the subscription before requesting demand, then request an unbounded amount.
this.subscription = subscription;
this.subscription.request(Long.MAX_VALUE);
}
@Override
public void onNext(Tuple2<K, V> tuple) {
ProducerRecord<K, V> record = new ProducerRecord<>(topic,
partitioner.apply(tuple.getT1()),
tuple.getT1(),
tuple.getT2());
kafkaProducer.send(record);
this.subscription.request(Long.MAX_VALUE);
}
@Override
public void onError(Throwable throwable) {
errorConsumer.accept(throwable);
}
@Override
public void onComplete() {
this.kafkaProducer.close();
}
}
|
// Template Source: BaseEntity.java.tt
package com.microsoft.graph.models;
import com.microsoft.graph.serializer.ISerializer;
import com.microsoft.graph.serializer.IJsonBackedObject;
import com.microsoft.graph.serializer.AdditionalDataManager;
import java.util.EnumSet;
import com.microsoft.graph.http.BaseCollectionPage;
import com.google.gson.JsonObject;
import com.google.gson.annotations.SerializedName;
import com.google.gson.annotations.Expose;
import javax.annotation.Nullable;
import javax.annotation.Nonnull;
// **NOTE** This file was generated by a tool and any changes will be overwritten.
/**
* The class for the Upload Session.
*/
public class UploadSession implements IJsonBackedObject, com.microsoft.graph.tasks.IUploadSession {
/** the OData type of the object as returned by the service */
@SerializedName("@odata.type")
@Expose
@Nullable
public String oDataType;
private transient AdditionalDataManager additionalDataManager = new AdditionalDataManager(this);
@Override
@Nonnull
public final AdditionalDataManager additionalDataManager() {
return additionalDataManager;
}
/**
* The Expiration Date Time.
* The date and time in UTC that the upload session will expire. The complete file must be uploaded before this expiration time is reached.
*/
@SerializedName(value = "expirationDateTime", alternate = {"ExpirationDateTime"})
@Expose
@Nullable
public java.time.OffsetDateTime expirationDateTime;
/**
* The Next Expected Ranges.
* When uploading files to document libraries, this is a collection of byte ranges that the server is missing for the file. These ranges are zero-indexed and of the format, '{start}-{end}' (e.g. '0-26' to indicate the first 27 bytes of the file). When uploading files as Outlook attachments, instead of a collection of ranges, this property always indicates a single value '{start}', the location in the file where the next upload should begin.
*/
@SerializedName(value = "nextExpectedRanges", alternate = {"NextExpectedRanges"})
@Expose
@Nullable
public java.util.List<String> nextExpectedRanges;
/**
* The Upload Url.
* The URL endpoint that accepts PUT requests for byte ranges of the file.
*/
@SerializedName(value = "uploadUrl", alternate = {"UploadUrl"})
@Expose
@Nullable
public String uploadUrl;
/**
* Sets the raw JSON object
*
* @param serializer the serializer
* @param json the JSON object to set this object to
*/
public void setRawObject(@Nonnull final ISerializer serializer, @Nonnull final JsonObject json) {
}
/**
* Gets the Upload Url.
* The URL endpoint that accepts PUT requests for byte ranges of the file.
* @return the upload Url
*/
@Nullable
@Override
public String getUploadUrl() {
return uploadUrl;
}
/**
* Gets the Next Expected Ranges.
* A collection of byte ranges that the server is missing for the file. These ranges are zero indexed and of the format 'start-end' (e.g. '0-26' to indicate the first 27 bytes of the file). When uploading files as Outlook attachments, instead of a collection of ranges, this property always indicates a single value '{start}', the location in the file where the next upload should begin.
* @return the Next Expected Ranges.
*/
@Nullable
@Override
public java.util.List<String> getNextExpectedRanges() {
return nextExpectedRanges;
}
}
|
package be.peopleware.jsf_II.persistence;
import java.util.Arrays;
import javax.faces.component.UIViewRoot;
import javax.faces.context.FacesContext;
import javax.faces.event.ActionEvent;
import javax.servlet.ServletRequestListener;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import be.peopleware.bean_IV.CompoundPropertyException;
import be.peopleware.exception_I.TechnicalException;
import be.peopleware.jsf_II.FatalFacesException;
import be.peopleware.jsf_II.RobustCurrent;
import be.peopleware.persistence_I.IdNotFoundException;
import be.peopleware.persistence_I.PersistentBean;
import be.peopleware.persistence_I.dao.AsyncCrudDao;
/**
* <p>Handler for {@link PersistentBean} detail CRUD pages.</p>
* <p>This handler can be used in a number of circumstances. The main use
* is as the backing bean for a detail CRUD page of an instance of a semantic type.
* For this to work, the handler needs a {@link #getDao()} and the
* {@link #getType()} filled out,
* needs to know the previous {@link #getViewMode()},
* and it needs an {@link #getInstance() instance}.</p>
* <p>The instance can be set explicitly with {@link #setInstance(PersistentBean)}.
* This immediately also sets the {@link #getId()}. If the
* {@link #getInstance() instance} is <code>null</code> when it is requested,
* we will retrieve the instance with id {@link #getId()} and type
* {@link #getType()} from persistent storage, and cache it. If at this time
* {@link #getId()} is <code>null</code>, a new instance of {@link #getType()}
* will be created.</p>
* <p>In conclusion this means that, before an instance of this class can be used,
* you need to set the following:</p>
* <ul>
* <li>the {@link #setType(Class) type} and</li>
* <li>the {@link #setViewMode(String) previous view mode}, and</li>
* <li>either
* <ul>
* <li>an {@link #setInstance(PersistentBean) instance},</li>
* <li>or
* <ul>
* <li>a {@link #setDao(AsyncCrudDao) dao}, and</li>
* <li>an {@link #setId(Long) id}.</li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
*
* <h2>States & Transitions</h2>
* <p>An <code>PersistentBeanCrudHandler</code> has 4 states and
* a number of state transitions. The states are actually <dfn>view modes</dfn>
* for the JSF page.</p>
* <img src="doc-files/persistence.gif" style="width: 100%;" />
*
* <h3>Retrieving the {@link be.peopleware.persistence_I.PersistentBean Instance}</h3>
* <p>With each HTTP request (except the request for a screen to fill out the
* values for a new instance, the request to create a new instance, the request
* to show a list of persistent beans and a request to go back to the
* previous page) we expect the
* {@link be.peopleware.persistence_I.PersistentBean#getId() primary key of a
* persistent bean} as request parameter. This id is filled in in the handler's
* {@link #getId()} property. Before
* the Update Model Values phase is reached, we need to load the
* {@link be.peopleware.persistence_I.PersistentBean} with
* {@link #getId() this id} and type {@link #getType()} from
* persistent storage. This instance will be stored in the
* {@link #getInstance() instance handler property}.</p>
* <p>It is always possible that no instance with {@link
* #getId() this id} and {@link #getType() type} can be found
* in persistent storage, because it never existed, or because such instance
* was removed in-between user requests from persistent storage. Whenever this
* occurs, the user will be directed back to the page he originally came from,
* before he accessed this page.</p>
* <p>Again it is possible that the previous instance does not exist in persistent
* storage anymore, so this process is recursive, until the first page after
* login is reached.</p>
* <p>If the user itself is removed from the system while logged in, the session
* will be stopped as early as possible, and the user will return to the login
* page.</p>
* <p>HOW IS THIS ACHIEVED?</p>
* <h3>Entry</h3>
* <p>The state machine is entered by a navigation request (<code>navigate(id)
* [found(id)]</code>), or a request to edit a new instance
* (<code>editNew()</code>).</p>
* <p>Entry is requested always from the handler of a previous page. When entry
* succeeds, the previous page is recorded in the TODO NavigationStack, so we
* can go back to it later.</p>
* <h3>Display</h3>
* <p>In view mode <code>display</code>, the data of the {@link
* be.peopleware.persistence_I.PersistentBean} is shown, in a non-editable
* way.</p>
* <h4>delete</h4>
* <p>From this state, we can try to delete the {@link
* be.peopleware.persistence_I.PersistentBean}. When the bean is not found in
* persistent storage (<code>delete(id) [! found(id)]</code>), the user is
* brought back to the previous page, and a message is shown. If the bean is
* found in persistent storage, deletion might fail for semantic reasons
* (<code>delete(id) [found(id) && exception]</code>). In this case, we
* stay in <code>display</code> state, and show a message to the user about the
* exception. When deletion succeeds (<code>delete(id) [found(id) &&
* nominal] / DELETE</code>), the instance is deleted from persistent storage,
* and the handler goes to the <code>deleted</code> state.</p>
* <p>This is implemented in the action method {@link #delete()}. The user can
* request the deletion by clicking a button defined as:</p>
* <pre>
* <h:commandButton action="#{<var>myHandler</var>.delete}"
* value="#{<var>myHandler</var>.buttonLabels['delete']}"
* rendered="#{<var>myHandler</var>.viewMode eq 'display'}"
* immediate="true"/>
* </pre>
* <h4>edit</h4>
* <p>In <code>display</code> mode, the user can also ask for <code>edit</code> mode. When the
* bean is not found in persistent storage (<code>edit(id) [! found(id)]</code>),
* the user is brought back to the previous page, and a message is shown. If the
* bean is found in persistent storage (<code>edit(id) [found(id)]</code>), the
* handler goes to the <code>edit</code> state.</p>
* <p>This is implemented in action method {@link #edit()}. The user can request
* view mode <code>edit</code> by clicking a button defined as:</p>
* <pre>
* <h:commandButton action="#{<var>myHandler</var>.edit}"
* value="#{<var>myHandler</var>.buttonLabels['edit']}"
* rendered="#{<var>myHandler</var>.viewMode eq 'display'}"
* immediate="true"/>
* </pre>
* <h4>goBack</h4>
* <p>From display mode, the user can request to go back to the previous
* page (<code>goBack()</code>). If the previous instance cannot be found, this
* action propagates the user to instances visited earlier, recursively.</p>
* <p>HOW IS THIS IMPLEMENTED? The user can request to go back by clicking a
* button defined as:</p>
* <pre>
* MUDO (jand) NO IDEA YET
* </pre>
* <h4>navigate</h4>
* <p>Finally, the user can request to navigate to another page
* (<code>nextPageHandler.navigate(nextPageId) [found(nextPageId)]</code>).
* Presumably, he clicks on a link to navigate to a related bean. This state
* transition is a placeholder for any number of possible navigation points in
* the page. The intention is to show the user a page for the next instance, in
* <code>display</code> mode. It is possible that this next instance cannot be
* found in persistent storage <code>(nextPageHandler.navigate(nextPageId) [!
* found(nextPageId)]</code>). In that case, we stay in <code>display</code>
* state in this page, and show the user a message.</p>
* <p>This is not implemented in this generic handler.</p>
* <h3>Edit</h3>
* <p>In view mode <code>edit</code>, the data of the {@link
* be.peopleware.persistence_I.PersistentBean} is shown, in an editable way. This
* means that the user can change the current values of the properties of the
* persistent bean or fill in the value of properties that were not specified
* before.</p>
* <h4>cancel</h4>
* <p>From this state, we can try to return to the <code>display</code> state
* without applying the changes. When the bean is not found in persistent storage
* (<code>cancel(id) [!found(id)]</code>), the user is brought back to the
* previous page, and a message is shown. If the bean is found in persistent
* storage (<code>cancel(id) [found(id)]</code>), we return to the
* <code>display</code> state and the values that were filled in in the
* <code>edit</code> state are forgotten; this means that the old
* values of the bean are shown in the <code>display</code> state and that the
* persistent bean is unchanged.</p>
* <p>This is implemented in action method {@link #cancelEdit()}. The user can
* request cancellation by clicking a button defined as:</p>
* <pre>
* <h:commandButton action="#{<var>myHandler</var>.cancelEdit}"
* value="#{<var>myHandler</var>.buttonLabels['cancel']}"
* rendered="#{<var>myHandler</var>.viewMode eq 'edit'}"
* immediate="true"/>
* </pre>
* <h4>update</h4>
* <p>From the <code>edit</code> state, we can try to update the {@link
* be.peopleware.persistence_I.PersistentBean} with the values filled in by the
* user. When the bean is not found in persistent storage (<code>update(id, data)
* [! found(id)]</code>), the user is brought back to the
* previous page, and a message is shown. If the bean is found in persistent
* storage, updating might fail for semantic reasons (<code>update(id, data)
* [found(id) && exception]</code>). In this case, we stay in the
* <code>edit</code> state, and show a message to the user about the exception.
* When updating succeeds (<code>update(id, data) [found(id) &&
* nominal]</code>), the handler goes to the <code>display</code> state and the
* persistent bean is updated in persistent storage.</p>
* <p>This is implemented in action method {@link #update()}. The user can request
* storing the new information by clicking a button defined as:</p>
* <pre>
* <h:commandButton action="#{<var>myHandler</var>.update}"
* value="#{<var>myHandler</var>.buttonLabels['commit']}"
* rendered="#{<var>myHandler</var>.viewMode eq 'edit'}" />
* </pre>
* <h3>Edit New</h3>
* <p>In view mode <code>editNew</code>, the user is shown a page, with empty,
* editable fields for him to fill in. These fields represent a new bean of type
* {@link #getType()}. It is
* possible that some data is filled out when this page is shown, e.g., default
* data, or good guesses.</p>
* <h4>cancel</h4>
* <p>If the user changes his mind, he can request to cancel the
* <code>editNew</code> state (<code>cancel()</code>). This amounts to the user
* issuing a request to go back to the previous page (<code>goBack()</code>). If
* the previous instance cannot be found, this action propagates the user to
* instances visited earlier, recursively.</p>
* <p>This is implemented in action method {@link #cancelEditNew()}. The user can
* request cancellation by clicking a button defined as:</p>
* <pre>
* <h:commandButton action="#{<var>myHandler</var>.cancelEditNew}"
* value="#{<var>myHandler</var>.buttonLabels['cancel']}"
* rendered="#{<var>myHandler</var>.viewMode eq 'editNew'}"
* immediate="true"/>
* </pre>
* <h4>create</h4>
* <p>From view mode <code>editNew</code>, the user can submit data for the actual
* creation of a new bean in persistent storage. This can fail with semantic
* exceptions (<code>create(data) [exception]</code>). We stay in the
* <code>editNew</code> state, and show the user messages about the errors. If
* creation succeeds (<code>create(data) [nominal]</code>), the new instance is
* created in persistent storage, and the handler goes to the
* <code>display</code> state.</p>
* <p>This is implemented in action method {@link #create()}. The user can request
* storing the new information by clicking a button defined as:</p>
* <pre>
* <h:commandButton action="#{<var>myHandler</var>.create}"
* value="#{<var>myHandler</var>.buttonLabels['commit']}"
* rendered="#{<var>myHandler</var>.viewMode eq 'editNew'}" />
* </pre>
* <h3>Deleted</h3>
* <p>In view mode <code>deleted</code>, the data of the deleted {@link
* be.peopleware.persistence_I.PersistentBean} is shown, in a non-editable way,
* with visual feedback about the fact that it was deleted (e.g., strikethrough).</p>
* <h4>goBack</h4>
* <p>From this mode, the only action of the user can be to request to go back to
* the previous page (<code>goBack()</code>). If the previous instance cannot be
* found, this action propagates the user to instances visited earlier,
* recursively.</p>
* <p>HOW IS THIS IMPLEMENTED? BUTTON?</p>
* <h2>Remembering State</h2>
* <p>With this setup, the only state that must be remembered in-between HTTP
* requests is the id of the {@link PersistentBean} we are working with, and the
* view mode of the handler. By storing this information in the actual HTML page,
* we essentially make the handler stateless.</p>
* <p>This information needs to be filled out in the handler properties ({@link
* #getId()} and {@link #getViewMode()} before the action methods are executed. This
* can be achieved by storing these values in hidden text fields in the JSF page
* with the <code>immediate</code> attribute set to <code>true</code>:</p>
* <pre>
* <h:inputHidden value="#{<var>myHandler</var>.id}" immediate="true" />
* <h:inputHidden value="#{<var>myHandler</var>.viewMode}" immediate="true" /></pre>
* <p>
* Since the <code>immediate</code> attribute of these inputHidden tags is set
* to true, the corresponding setters {@link #setId(Long)} and
* {@link #setViewMode(String)} will be called early in the request / response
* cycle, namely during the Apply Request Values Phase. We give the
* {@link #setId(Long)} method side-effects that initialise the persistent bean
* and all other resources that are needed during the request / response cycle.
* In this way, the handler is initialised early in the request / response cycle.
* This is necessary, because, e.g., for some of the action methods
* ({@link #update()} and {@link #create()}), the bean must be available in
* {@link #getInstance()} before the Update Model Values phase, to receive
* values from the UIView.<br />
* If no instance with the requested id can be found in persistent storage,
* {@link #getInstance()} will be <code>null</code> afterwards to signal this.
* <br />
* The {@link #setId(Long)} method cannot however be used to create a new bean
* for entry into view mode <code>editNew</code>, and for the {@link #create()}
* action method. This is because the setter {@link #setId(Long)} will not be
* called when the request parameter for the id is <code>null</code>,
* since the id property is <code>null</code> already (see below).
* Therefore, creating a new bean is done as a side-effect in the
* {@link #setViewMode(String)} method, whenever the requested view mode
* is <code>editNew</code>.</p>
* <p>To make sure that the hidden fields described above contain the correct values at the
* beginning of a request / response cycle, the corresponding handler must hold
* the correct values for the properties {@link #getId()} and {@link #getViewMode()}
* before the Render Response phase of the previous cycle is entered.</p>
* <p>To guarantee that the setters {@link #setId(Long)} and
* {@link #setViewMode(String)} are called during the Apply Request Values
* Phase, the {@link #getId()} and {@link #getViewMode()} should be set to null
* after the Render Response Phase. This is because
* the {@link #setId(Long)} (resp. {@link #setViewMode(String)}) method is only
* executed when the old value of {@link #getId()} (resp. {@link #getViewMode()})
* and the <code>id</code> (resp. <code>viewMode</code>) that comes as a
* parameter with the HTTP request are different. To achieve this, the
* {@link #getId()} (resp. {@link #getViewMode()}) should be set to null at the
* end of each cycle.</p>
* <p>JavaServer Faces does not offer the possibility to do anything after the
* response is rendered. So we need an extra mechanism that clears the handler.
* This is done by a special {@link ServletRequestListener} that magically knows which
* handler to reset.</p>
* <h2>Not Remembering State</h2>
* <p>After the response is rendered completely, the {@link #getId()} and {@link
* #getInstance()} (and possibly other resources) are no longer needed, and
* should be set to <code>null</code>, to avoid clogging memory in-between user
* requests. Setting the {@link #getId()} and {@link #getViewMode()} to null
* is also necessary for functional reasons, as described above. Releasing
* resources can be achieved by setting the handlers in request scope.</p>
* <h2>Configuration</h2>
* <p>This class requires the definition of 2 listeners in
* <kbd>WEB-INF/web.xml</kbd>:</p>
* <pre>
* <listener>
* <listener-class>be.peopleware.servlet_I.hibernate.SessionFactoryController</listener-class>
* </listener>
* <listener>
* <listener-class>be.peopleware.servlet_I.hibernate.SessionInView</listener-class>
* </listener>
* </pre>
*
*
* @author Jan Dockx
* @author Nele Smeets
* @author Peopleware n.v.
*
* @invar (getViewMode() != null)
* ? isViewMode(getViewMode())
* : true;
* @invar (getInstance() != null)
* ? getType().isInstance(getInstance())
* : true;
*
* @idea (jand) gather viewmode in separate class
* @mudo (jand) security
*/
public class PersistentBeanCrudHandler extends AbstractPersistentBeanHandler {
/*<section name="Meta Information">*/
/** {@value} */
public static final String CVS_REVISION = "$Revision$";
/** {@value} */
public static final String CVS_DATE = "$Date$";
/** {@value} */
public static final String CVS_STATE = "$State$";
/** {@value} */
public static final String CVS_TAG = "$Name$";
/*</section>*/
private static final Log LOG = LogFactory.getLog(PersistentBeanCrudHandler.class);
public PersistentBeanCrudHandler() {
LOG.debug("constructor of PersistentBeanCrudHandler");
}
// public final void setDao(final AsyncCrudDao dao) {
// super.setDao(dao);
// if (getNavigationString() != null && getType() != null) {
// ValueChangeEvent event = null;
// setIdAndInitialisePersistentBean(event);
// setViewModeAndInitialisePersistentBean(event);
// public final void setNavigationString(final String navigationString) {
// super.setNavigationString(navigationString);
// if (getDao() != null && getType() != null) {
// ValueChangeEvent event = null;
// setIdAndInitialisePersistentBean(event);
// setViewModeAndInitialisePersistentBean(event);
// public final void setTypeAsString(final String type) {
// super.setTypeAsString(type);
// if (getDao() != null && getNavigationString() != null) {
// ValueChangeEvent event = null;
// setIdAndInitialisePersistentBean(event);
// setViewModeAndInitialisePersistentBean(event);
/*<property name="id">*/
/**
* The id of the {@link PersistentBean} that is handled by the requests.
*
* @basic
* @init null;
*/
public final Long getId() {
return $id;
}
/**
* Store the given id in {@link #getId()}.
*
* @param id
* The id of the {@link PersistentBean} that will be handled in the
* requests.
* @post new.getId().equals(id);
*/
public final void setId(final Long id) {
// set the id
$id = id;
LOG.debug("id of " + this + " set to " + id);
}
// /**
// * Retrieve the persistent bean of type {@link type} and id {@link id}
// * from persistent storage, and store it in {@link #getInstance()}.
// *
// * If the necessary arguments and utilities are not set, exceptions are thrown.
// *
// * If no bean of type {@link type} with id {@link id} is found in
// * persistent storage, {@link #getInstance()} is forced to <code>null</code>.
// *
// * @param dao
// * The dao used to retrieve the {@link PersistentBean} from storage.
// * @param id
// * The id of the {@link PersistentBean} to retrieve.
// * @param type
// * The type of the {@link PersistentBean} to retrieve.
// * @pre dao != null;
// * @post (new.getInstance() != null)
// * ? id.equals(new.getInstance().getId())
// * : true;
// * @post (new.getInstance() != null)
// * ? type.isInstance(new.getInstance())
// * : true;
// * @throws IdException
// * id == null;
// * @throws IdException
// * type == null;
// * @throws TechnicalException tExc
// * ; something technical went wrong, but surely
// * ! (tExc instanceof IdNotFoundException)
// */
// private void retrieveWithId(final AsyncCrudDao dao, Long id, Class type)
// throws IdException, TechnicalException {
//// (nsmeets) waarom worden die drie dingen als param meegegeven?
// // mudo (jand) remove params
// try {
// assert dao != null;
// //id or type are not known so passing the id to the exception is useless
// if (id == null) {
// LOG.error("id == null");
// throw new IdException("ID_NULL", null, type);
// if (type == null) {
// LOG.error("type == null");
// throw new IdException("TYPE_NULL", null, type);
// LOG.debug("retrieving persistent bean with id "
// + id.toString() + " and type "
// + type.getName() + "...");
// $instance = dao.retrievePersistentBean(id, type); // IdNotFoundException, TechnicalException
// if (LOG.isDebugEnabled()) { // @mudo (nsmeets) consequent overal doen? Alleen bij dingen die veel vergen.
// // if makes that there really is lazy loading if not in debug
// LOG.debug("retrieved persistent bean is " + getInstance());
// assert getInstance() != null;
// assert getInstance().getId().equals(id);
// assert type.isInstance(getInstance());
// catch (IdNotFoundException e) {
// // this will force $instance null
// LOG.info("could not find instance of type "
// + type.getName()
// + " with id " + id, e);
// $instance = null;
// catch (TechnicalException e) {
// LOG.error("exception during retrieveWithId", e);
// throw e;
// /**
// * Store the given id and retrieve the corresponding {@link PersistentBean}
// * from storage.
// *
// * Store the given id in {@link #getId()}.
// * Load the {@link PersistentBean} with the given id, whose type is equal to
// * {@link #getType()} from persistent storage and store this bean in
// * {@link #getInstance()}.
// * If no such bean is found in persistent storage, or when some
// * technical exception occurs, {@link #getInstance()} is set
// * to <code>null</code>.
// *
// * @param id
// * The id of the {@link PersistentBean} that will be handled in the
// * requests.
// * @post new.getId().equals(id);
// * @post (new.getInstance() != null)
// * ? new.getInstance().getId().equals(id)
// * : true;
// * @post (new.getInstance() != null)
// * ? getType().isInstance(new.getInstance())
// * : true;
// */
// private final void setIdAndInitialisePersistentBean(final Long id) {
// // set the id
// setId(id);
// // load the persistent bean with type getType() and the given id from
// // persistent storage
// try {
// retrieveWithId(getDao(), id, getType()); // IdException, TechnicalException
// catch(IdException exc) {
// // This exception is thrown when id == null or getType() == null.
// // 1. when id == null, then (normally) a new bean is created in
// // {@link #setViewMode}, so we leave {@link #getInstance()} unchanged
// // 2. getType() cannot be null; the type of a handler should be declared
// // as a managed property in faces-config.xml
// catch(TechnicalException exc) {
// $instance = null;
// // @idea (nsmeets) retrieve other resources
// public void setIdAndInitialisePersistentBean(ValueChangeEvent event) {
// Map requestParameterMap = RobustCurrent.externalContext().getRequestParameterMap();
// // get id
// String idString = (String)requestParameterMap.get("form:id");
// Long id = null;
// if (idString != null && !idString.equals("")) {
// id = Long.valueOf(idString);
// // set id
// setIdAndInitialisePersistentBean(id);
/**
* The id of the {@link PersistentBean} that will be handled
* by the requests.
*/
private Long $id;
/*</property>*/
/*<property name="viewMode">*/
/** {@value} */
public final static String VIEWMODE_DISPLAY = "display";
/** {@value} */
public final static String VIEWMODE_EDIT = "edit";
/** {@value} */
public final static String VIEWMODE_EDITNEW = "editNew";
/** {@value} */
public final static String VIEWMODE_DELETED = "deleted";
/**
* { {@link #VIEWMODE_DISPLAY}, {@link #VIEWMODE_EDIT},
* {@link #VIEWMODE_EDITNEW}, {@link #VIEWMODE_DELETED} };
*/
public final static String[] VIEWMODES
= {VIEWMODE_DISPLAY, VIEWMODE_EDIT, VIEWMODE_EDITNEW, VIEWMODE_DELETED};
/**
* Does <code>viewMode</code> represent a valid view mode?
*
* @param viewMode
* The viewMode to be checked.
* @return Arrays.asList(VIEWMODES).contains(s);
*/
public static boolean isViewMode(String viewMode) {
return Arrays.asList(VIEWMODES).contains(viewMode);
}
/**
* The view mode of the handler.
*
* @basic
* @init null;
*/
public final String getViewMode() {
return $viewMode;
}
public final void setViewMode(String viewMode) throws IllegalArgumentException {
if (! isViewMode(viewMode)) {
throw new IllegalArgumentException("\"" + viewMode + "\" is not a valid view mode; " +
"it must be one of " + VIEWMODES);
}
// set the view mode
$viewMode = viewMode;
}
// setViewMode(viewMode);
// // When the view mode is equal to VIEWMODE_EDITNEW, then create
// // a new instance of {@link getType()} and store it in {@link #getInstance()}
// if ($viewMode.equals(VIEWMODE_EDITNEW)) {
// createNewInstance();
// public final void setViewModeAndInitialisePersistentBean(ValueChangeEvent event) {
// Map requestParameterMap = RobustCurrent.externalContext().getRequestParameterMap();
// // get view mode
// String viewModeString = (String)requestParameterMap.get("form:viewMode");
// // set view mode
// if (viewModeString != null && !viewModeString.equals("")) {
// setViewModeAndInitialisePersistentBean(viewModeString);
/**
* @invar ($viewMode != null)
* ? isViewMode($viewMode)
* : true;
*/
private String $viewMode;
/*</property>*/
/**
* Returns true when the handler is editable, i.e. when the view mode is
* equal to VIEWMODE_EDIT or VIEWMODE_EDITNEW. Returns false otherwise.
*
* This method is introduced to avoid writing
* myHandler.viewmode eq 'edit' or myHandler.viewmode eq 'editNew'
* and
* myHandler.viewmode neq 'edit' and myHandler.viewmode neq 'editNew'
* as value of the rendered attributes of in- and output fields in JSF pages,
* which is cumbersome.
* Now we can write
* myHandler.inViewModeEditOrEditNew
* and
* not myHandler.inViewModeEditOrEditNew
*
* @return getViewMode().equals(VIEWMODE_EDIT) ||
* getViewMode().equals(VIEWMODE_EDITNEW);
* @throws FatalFacesException
* getViewMode() == null;
*/
public final boolean isInViewModeEditOrEditNew() throws FatalFacesException {
if (getViewMode() == null) {
RobustCurrent.fatalProblem("ViewMode is null", LOG);
return false;
}
else {
return
getViewMode().equals(VIEWMODE_EDIT) ||
getViewMode().equals(VIEWMODE_EDITNEW);
}
}
/*<property name="instance">*/
/**
* The {@link PersistentBean} that is handled in the requests.
*
* @basic
* @init null;
*/
public final PersistentBean getInstance() {
if ($instance == null) {
if (getId() != null) {
loadInstance();
}
else {
createInstance();
}
}
return $instance;
}
public final void setInstance(PersistentBean instance) throws IllegalArgumentException {
if (instance != null && ! getType().isAssignableFrom(instance.getClass())) {
throw new IllegalArgumentException("instance " + instance +
" is not a subtype of " +
getType());
}
$instance = instance;
if (instance != null) {
setId(instance.getId());
}
// else, we do NOT set the ID to null
}
/**
* @pre getDao() != null;
* @throws FatalFacesException
* {@link AsyncCrudDao#retrievePersistentBean(java.lang.Long, java.lang.Class)} / {@link TechnicalException}
* @throws FatalFacesException
* MUDO (jand) other occurences must be replaced by goBack()
*/
private void loadInstance() throws FatalFacesException {
assert getDao() != null;
try {
if (getId() == null) {
RobustCurrent.fatalProblem("id is null", LOG);
// MUDO (jand) replace with goback?
}
if (getType() == null) {
RobustCurrent.fatalProblem("type is null", LOG);
// MUDO (jand) replace with goback?
}
LOG.debug("retrieving persistent bean with id "
+ getId() + " and type " + getType() + "...");
$instance = getDao().retrievePersistentBean(getId(), getType()); // IdNotFoundException, TechnicalException
assert getInstance() != null;
assert getInstance().getId().equals(getId());
assert getType().isInstance(getInstance());
if (LOG.isDebugEnabled()) {
// if makes that there really is lazy loading if not in debug
LOG.debug("retrieved persistent bean is " + getInstance());
}
}
catch (IdNotFoundException infExc) {
// this will force $instance null
LOG.info("could not find instance of type " + getType() +
" with id " + getId(), infExc);
$instance = null;
// MUDO goback() instead of exception
RobustCurrent.fatalProblem("could not find persistent bean with id " +
getId() + " of type " +
getType(), infExc, LOG);
}
catch (TechnicalException tExc) {
RobustCurrent.fatalProblem("could not retrieve persistent bean with id " +
getId() + " of type " +
getType(), tExc, LOG);
}
}
/**
* Create a new instance of type {@link #getType()} and store
* it in {@link #getInstance()}.
*
* @post new.getInstance() isfresh
* @post new.getInstance() == getType().newInstance();
*/
private void createInstance() {
try {
$instance = (PersistentBean)getType().newInstance();
}
// all exceptions are programmatic errors here, in subclass, in config or in JSF
catch (InstantiationException iExc) {
assert false : "exception while creating new instance of type " + getType() + iExc;
}
catch (IllegalAccessException iaExc) {
assert false : "exception while creating new instance of type " + getType() + iaExc;
}
catch (ExceptionInInitializerError eiiErr) {
assert false : "exception while creating new instance of type " + getType() + eiiErr;
}
catch (SecurityException sExc) {
assert false : "exception while creating new instance of type " + getType() + sExc;
}
catch (ClassCastException ccExc) {
assert false : "exception while creating new instance of type " + getType() + ccExc;
}
}
/**
* Method to be called after Render Response phase, to clear
* semantic data from the session.
*
* @post $instance == null;
*/
private void releaseInstance() {
$instance = null;
}
/**
* The {@link PersistentBean} that is handled in the requests.
*/
private PersistentBean $instance;
/*</property>*/
/**
* This method should be called from within another handler when navigating
* to this JSF page. It initialises this handler properly, so that the
* {@link PersistentBean} whose id is given can be shown in display mode.
*
* To initialise the handler properly, the following three steps are taken:
* 1. Store the given id in {@link #getId()}.
* 2. Load the {@link PersistentBean} with the given id, whose type is equal
* to {@link #getType()} from persistent storage and store this bean in
* {@link #getInstance()}. If no such bean is found in persistent storage,
* or when some technical exception occurs, this is signalled to the user
* by throwing an IdNotFoundException.
* 3. We go to display mode.
*
* @post new.getId().equals(id);
* @post (new.getInstance() != null)
* ? new.getInstance().getId().equals(id)
* : true;
* @post (new.getInstance() != null)
* ? getType().isInstance(new.getInstance())
* : true;
* @post new.getViewMode().equals(VIEWMODE_DISPLAY);
*
* @mudo (jand) security
*/
public final void navigateHere(ActionEvent aEv) throws FatalFacesException {
assert getType() != null : "type cannot be null";
LOG.debug("PersistentBeanCrudHandler.navigate called; initialising id and bean");
if (getInstance() == null) {
LOG.debug("no instance in " + this +
"; cannot navigate; staying where we are");
// String componentId = aEv.getComponent().getId();
// MUDO (jand) add i18n message!!!!
}
setViewMode(VIEWMODE_DISPLAY);
// create new view & navigate
FacesContext context = RobustCurrent.facesContext();
UIViewRoot viewRoot = RobustCurrent.viewHandler().createView(context, getDetailViewId());
context.setViewRoot(viewRoot);
context.renderResponse();
}
private final static String DETAIL_VIEW_ID_PREFIX = "/jsf/";
private final static String DETAIL_VIEW_ID_SUFFIX = ".jspx";
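/**
* The JSF view id of the detail page for {@link #getType()}, derived from the fully qualified
* type name: a type {@code my.package.Person}, for example, maps to {@code /jsf/my/package/Person.jspx}.
*/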
protected String getDetailViewId() {
assert getType() != null : "type cannot be null";
String typeName = getType().getName();
typeName = typeName.replace('.', '/');
return DETAIL_VIEW_ID_PREFIX + typeName + DETAIL_VIEW_ID_SUFFIX;
}
/**
* This is an action method that should be called by a button in the JSF
* page to go to edit mode.
*
* A more detailed description of this action method can be found in the
* class description.
*
* @post (getInstance() == null)
* ? 'return to previous page' &&
* result.equals(NO_INSTANCE)
* : true;
* @post (getInstance() != null && !getViewMode().equals(VIEWMODE_DISPLAY))
* ? getViewMode().equals(VIEWMODE_DISPLAY) &&
* result.equals(null)
* : true;
* @post (getInstance() != null && getViewMode().equals(VIEWMODE_DISPLAY))
* ? getViewMode().equals(VIEWMODE_EDIT) &&
* result.equals(null)
* : true;
* @mudo (jand) security
*/
public final String edit() {
LOG.debug("PersistentBeanCrudHandler.edit called; showing bean for edit");
LOG.debug("persistentBean: " + getInstance());
try {
checkConditions(VIEWMODE_DISPLAY); // ConditionException
setViewMode(VIEWMODE_EDIT);
return getNavigationString();
}
catch(ConditionException exc) {
return exc.getNavigationString();
}
}
/**
* This is an action method that should be called by a button in the JSF
* page to update a persistent bean in persistent storage.
*
* A more detailed description of this action method can be found in the
* class description.
*
* @post (getInstance() == null)
* ? 'return to previous page' &&
* result.equals(NO_INSTANCE)
* : true;
* @post (getInstance() != null && !getViewMode().equals(VIEWMODE_EDIT))
* ? getViewMode().equals(VIEWMODE_DISPLAY) &&
* result.equals(null)
* : true;
* @post (getInstance() != null && getViewMode().equals(VIEWMODE_EDIT)
* && 'updateValues generated messages')
* ? getViewMode().equals(VIEWMODE_EDIT) &&
* result.equals(null)
* : true;
* @post (getInstance() != null && getViewMode().equals(VIEWMODE_EDIT)
* && 'updateValues succeeded without messages'
* && 'update in storage generates no semantic exceptions')
* ? 'bean is updated in persistent storage' &&
* getViewMode().equals(VIEWMODE_DISPLAY) &&
* result.equals(null)
* : true;
* @post (getInstance() != null && getViewMode().equals(VIEWMODE_EDIT)
* && 'updateValues succeeded without messages'
* && 'update in storage generates semantic exceptions')
* ? getViewMode().equals(VIEWMODE_EDIT) &&
* result.equals(null)
* : true;
* @throws FatalFacesException
* When a TechnicalException is thrown by:
* {@link AsyncCrudDao#startTransaction()}
* {@link AsyncCrudDao#updatePersistentBean(be.peopleware.persistence_I.PersistentBean)}
* {@link AsyncCrudDao#commitTransaction(be.peopleware.persistence_I.PersistentBean)}
* {@link AsyncCrudDao#cancelTransaction()}
*/
public String update() throws FatalFacesException {
LOG.debug("PersistentBeanCrudHandler.update called; the bean is already partially updated");
LOG.debug("persistentBean: " + getInstance());
try {
AsyncCrudDao dao = getDao();
try {
checkConditions(VIEWMODE_EDIT); // ConditionException
updateValues();
LOG.debug("The bean is now fully updated");
LOG.debug("persistentBean: " + getInstance());
if (RobustCurrent.hasMessages()) {
// updateValues can create FacesMessages that signal semantic errors
return null;
}
else {
dao.startTransaction(); // TechnicalException
dao.updatePersistentBean(getInstance()); // TechnicalException, CompoundPropertyException
dao.commitTransaction(getInstance()); // TechnicalException, CompoundPropertyException
setViewMode(VIEWMODE_DISPLAY);
return null;
}
}
catch(CompoundPropertyException cpExc) {
LOG.debug("update action failed; cancelling ...", cpExc);
dao.cancelTransaction(); // TechnicalException
LOG.debug("update action cancelled; using exception as faces message");
RobustCurrent.showCompoundPropertyException(cpExc);
setViewMode(VIEWMODE_EDIT);
return null;
}
catch(ConditionException exc) {
return exc.getNavigationString();
}
}
catch(TechnicalException exc) {
RobustCurrent.fatalProblem("Could not update " + getInstance(), exc, LOG);
return null;
}
}
/**
* <p>
* This method can be used to update properties of {@link #getInstance()}
* that are not updated during the Update Model Values.
* </p>
* <p>
* Suppose that a JSF page contains the following tag:
* </p>
* <pre>
* <h:inputText value="#{<var>myHandler</var>.instance.name}" />
* </pre>
* <p>
* During the Update Model Values phase, the setName(String) method of the
* bean stored in {@link #getInstance()} will be called, thereby updating
* the value of the name property. Similarly, other properties are updated.
* </p>
* <p>
* But there can also be properties of a {@link PersistentBean} that are not
* updated 'automatically' during the Update Model Values phase. An example
* of this is a property <code>date</code> of type {@link java.util.Date},
* that is represented in the JSF page by three inputText tags representing
* year, month and day.
* </p>
* <pre>
* <h:inputText value="#{<var>myHandler</var>.year}" />
* <h:inputText value="#{<var>myHandler</var>.month}" />
* <h:inputText value="#{<var>myHandler</var>.day}" />
* </pre>
* <p>
* Because the values of these tags do not correspond directly
* to a property in the {@link PersistentBean}, the tags are backed by
* three properties ($year, $month, $day) in the handler with corresponding
* get and set methods. During the Update Model Values phase, the three
* properties are updated in the handler. The bean itself can then be
* updated during the Invoke Application phase, using the
* {@link #updateValues()} method. The implementation of this method could
* then be:
* </p>
* <pre>
* Date date
* = (new GregorianCalendar(getYear(), getMonth(), getDay())).getTime();
* ((SomeType) getInstance()).setDate(date);
* </pre>
* <p>
* The default implementation of this method does nothing.
* </p>
*/
protected void updateValues() {
// NOP
}
/**
* This method should be called from within another handler to pass
* a newly created persistent bean of type {@link #getType()} and show this
* bean in editNew mode. To do this, the handler should be properly
* initialised.
*
* A more detailed description of this action method can be found in the
* class description.
*
* To initialise the handler properly, the following two steps are taken:
* 1. The given {@link PersistentBean} is stored in {@link #getInstance()}.
* If the given bean is not effective, has an effective id (i.e. is not
* newly created), or is not of type {@link #getType()}, this is signalled
* to the user by throwing an InvalidBeanException.
* 2. We go to editNew mode.
*
* @param instance
* The {@link PersistentBean} that should be displayed.
* @post new.getInstance() == instance;
* @post new.getViewMode().equals(VIEWMODE_EDITNEW);
* @throws InvalidBeanException
* instance == null ||
* instance.getId() != null ||
* !getType().isInstance(instance);
*/
public final void editNew(PersistentBean instance) throws InvalidBeanException {
LOG.debug("PersistentBeanCrudHandler.editNew called; a new instance is stored in the handler");
if (instance == null || instance.getId() != null || !getType().isInstance(instance)) {
throw new InvalidBeanException(instance, getType());
}
$instance = instance;
assert getInstance() != null;
assert getInstance().getId() == null;
assert getType().isInstance(instance);
setViewMode(VIEWMODE_EDITNEW);
LOG.debug("Stored new persistent bean successfully");
}
public class InvalidBeanException extends Exception {
public InvalidBeanException(PersistentBean persistentBean, Class type) {
$persistentBean = persistentBean;
$type = type;
}
public PersistentBean getPersistentBean() {
return $persistentBean;
}
public Class getType() {
return $type;
}
private PersistentBean $persistentBean;
private Class $type;
}
/**
* This is an action method that should be called by a button in the JSF
* page to add a newly created persistent bean to persistent storage.
*
* A more detailed description of this action method can be found in the
* class description.
*
* @post (getInstance() == null)
* ? 'return to previous page' &&
* result.equals(NO_INSTANCE)
* : true;
* @post (getInstance() != null && !getViewMode().equals(VIEWMODE_EDITNEW))
* ? 'return to previous page' &&
* result.equals(INCORRECT_VIEWMODE)
* : true;
* @post (getInstance() != null && getViewMode().equals(VIEWMODE_EDITNEW)
* && 'updateValues generated messages')
* ? getViewMode().equals(VIEWMODE_EDITNEW) &&
* result.equals(null)
* : true;
* @post (getInstance() != null && getViewMode().equals(VIEWMODE_EDITNEW)
* && 'updateValues succeeded without messages'
* && 'update in storage generates no semantic exceptions')
* ? 'bean is created in persistent storage' &&
 *     new.getId() == new.getInstance().getId() &&
* getViewMode().equals(VIEWMODE_DISPLAY) &&
* result.equals(null)
* : true;
* @post (getInstance() != null && getViewMode().equals(VIEWMODE_EDITNEW)
* && 'updateValues succeeded without messages'
* && 'update in storage generates semantic exceptions')
* ? getViewMode().equals(VIEWMODE_EDITNEW) &&
* result.equals(null)
* : true;
* @throws FatalFacesException
* When a TechnicalException is thrown by:
* {@link AsyncCrudDao#startTransaction()}
* {@link AsyncCrudDao#createPersistentBean(be.peopleware.persistence_I.PersistentBean)}
* {@link AsyncCrudDao#commitTransaction(be.peopleware.persistence_I.PersistentBean)}
* {@link AsyncCrudDao#cancelTransaction()}
*/
public final String create() throws FatalFacesException {
LOG.debug("PersistentBeanCrudHandler.create called; a new bean is created and is "+
"already partially updated");
LOG.debug("persistentBean: " + getInstance());
try {
AsyncCrudDao dao = getDao();
try {
if (getInstance() == null) {
return goBack(NO_INSTANCE);
}
if (!getViewMode().equals(VIEWMODE_EDITNEW)) {
return goBack(INCORRECT_VIEWMODE);
}
updateValues();
LOG.debug("The bean is now fully updated");
LOG.debug("persistentBean: " + getInstance());
if (RobustCurrent.hasMessages()) {
// updateValues can create FacesMessages that signal semantic errors
return null;
}
else {
dao.startTransaction(); // TechnicalException
dao.createPersistentBean(getInstance()); // TechnicalException, CompoundPropertyException
dao.commitTransaction(getInstance()); // TechnicalException, CompoundPropertyException
assert getInstance().getId() != null;
setId(getInstance().getId());
setViewMode(VIEWMODE_DISPLAY);
return null;
}
}
catch(CompoundPropertyException cpExc) {
LOG.debug("create action failed; cancelling ...", cpExc);
dao.cancelTransaction(); // TechnicalException
LOG.debug("create action cancelled; using exception as faces message");
RobustCurrent.showCompoundPropertyException(cpExc);
setViewMode(VIEWMODE_EDITNEW);
return null;
}
}
catch(TechnicalException exc) {
RobustCurrent.fatalProblem("Could not create " + getInstance(), exc, LOG);
return null;
}
}
public static final String NO_INSTANCE = "NO_INSTANCE";
public static final String INCORRECT_VIEWMODE = "INCORRECT_VIEWMODE";
public static final String CANCEL_EDITNEW = "CANCEL_EDITNEW";
/**
* This is an action method that should be called by a button in the JSF
* page to delete a persistent bean from persistent storage.
*
* A more detailed description of this action method can be found in the
* class description.
*
* @post (getInstance() == null)
* ? 'return to previous page' &&
* result.equals(NO_INSTANCE)
* : true;
* @post (getInstance() != null && !getViewMode().equals(VIEWMODE_DISPLAY))
* ? getViewMode().equals(VIEWMODE_DISPLAY) &&
* result.equals(null)
* : true;
* @post (getInstance() != null && getViewMode().equals(VIEWMODE_DISPLAY)
* && 'delete in storage generates no semantic exceptions')
* ? 'bean is deleted from persistent storage' &&
* getViewMode().equals(VIEWMODE_DELETED) &&
* result.equals(null)
* : true;
* @post (getInstance() != null && getViewMode().equals(VIEWMODE_DISPLAY)
* && 'delete in storage generates semantic exceptions')
* ? getViewMode().equals(VIEWMODE_DISPLAY) &&
* result.equals(null)
* : true;
* @throws FatalFacesException
* When a TechnicalException is thrown by:
* {@link AsyncCrudDao#startTransaction()}
* {@link AsyncCrudDao#deletePersistentBean(be.peopleware.persistence_I.PersistentBean)}
* {@link AsyncCrudDao#commitTransaction(be.peopleware.persistence_I.PersistentBean)}
* {@link AsyncCrudDao#cancelTransaction()}
*/
public final String delete() throws FatalFacesException {
LOG.debug("PersistentBeanCrudHandler.delete() called");
LOG.debug("persistentBean: " + getInstance());
AsyncCrudDao dao = getDao();
try {
try {
checkConditions(VIEWMODE_DISPLAY); // ConditionException
dao.startTransaction(); // TechnicalException
dao.deletePersistentBean(getInstance()); // TechnicalException
dao.commitTransaction(getInstance());// TechnicalException, CompoundPropertyException
assert getInstance().getId() == null;
setViewMode(VIEWMODE_DELETED);
return getNavigationString();
}
catch(ConditionException exc) {
return exc.getNavigationString();
}
catch (CompoundPropertyException cpExc) {
LOG.debug("delete action failed; cancelling ...", cpExc);
dao.cancelTransaction(); // TechnicalException
LOG.debug("delete action cancelled; using exception as faces message");
RobustCurrent.showCompoundPropertyException(cpExc);
setViewMode(VIEWMODE_DISPLAY);
return null;
}
}
catch(TechnicalException exc) {
RobustCurrent.fatalProblem("Could not delete " + getInstance(), exc, LOG);
return null;
}
}
/**
* Helper method used in action methods to check whether the persistent bean
* is effective and whether the current view mode corresponds to the
* view mode in which the action method should be called.
* If one of these conditions is not met, appropriate actions are taken.
*
* When {@link #getInstance()} is <code>null</code>, we return to a previous
* page.
* When the {@link #getInstance()} is effective, but the current view mode
* does not correspond to the given expected view mode, we go to display mode.
* In both cases, a ConditionException is thrown that contains the
* navigation string.
*
* @param expectedViewMode
* The view mode that should be checked.
* @pre isViewMode(expectedViewMode);
* @post true
* @throws ConditionException exc
* getInstance() == null
* && exc.getOutcome().equals(NO_INSTANCE);
* As a side effect, we return to a previous page.
* @throws ConditionException exc
* getInstance() != null && !getViewMode().equals(expectedViewMode)
* && exc.getOutcome().equals(display());
* As a side effect, we go to display mode.
*/
private void checkConditions(String expectedViewMode) throws ConditionException {
assert isViewMode(expectedViewMode);
String result = null;
if (getInstance() == null) {
result = NO_INSTANCE;
goBack(result);
throw new ConditionException(result);
}
else if (!expectedViewMode.equals(getViewMode())) {
setViewMode(VIEWMODE_DISPLAY);
throw new ConditionException(null);
}
}
/**
* A class of exceptions that is used when checking whether an action method
* is called under the correct conditions.
*
* A navigation string describes where to go when a certain condition is not
* met.
*
* @author nsmeets
*/
private class ConditionException extends Exception {
public ConditionException(String navigationString) {
$navigationString = navigationString;
}
public String getNavigationString() {
return $navigationString;
}
private String $navigationString;
}
/**
* Method to be called after Render Response phase, to clear
* semantic data from the session.
*
* @post getInstance() == null;
*/
void release() {
releaseInstance();
// mudo (jand) more code or remove?
}
/**
* This method returns to the page that was visited before this page.
* It is possible that this previous page cannot be displayed anymore
* (e.g. because the corresponding {@link PersistentBean} does not exist in
* persistent storage anymore), so this process is recursive, until the first
* page after login is reached.
* The method returns the navigation string needed to navigate to the
 * previous page.
*
* @param message
* A message signalling why we are going back to a previous page.
* @return @mudo
*/
public String goBack(String message) {
// MUDO
return message;
}
public String goBack() {
LOG.debug("goBack method called");
return null;
}
/**
* This is an action method that should be called by a button in the JSF
* page to cancel the update of an existing persistent bean
* (i.e. a persistent bean that was loaded from persistent storage,
* meaning that {@link #getInstance()} <code>!=null</code> and
* {@link #getInstance()}.{@link #getId()} <code>!=null</code>).
*
* A more detailed description of this action method can be found in the
* class description.
*
* @post (getInstance() == null)
* ? 'return to previous page' &&
* result.equals(NO_INSTANCE)
* : true;
* @post (getInstance() != null && !getViewMode().equals(VIEWMODE_EDIT))
* ? getViewMode().equals(VIEWMODE_DISPLAY) &&
* result.equals(null)
* : true;
* @post (getInstance() != null && getViewMode().equals(VIEWMODE_EDIT))
* ? 'reset the UI components' &&
* getViewMode().equals(VIEWMODE_DISPLAY) &&
* result.equals(null)
* : true;
*/
public final String cancelEdit() {
LOG.debug("PersistentBeanCrudHandler.cancelEdit called; showing bean");
LOG.debug("persistentBean: " + getInstance());
try {
checkConditions(VIEWMODE_EDIT); // ConditionException
RobustCurrent.resetUIInputComponents();
setViewMode(VIEWMODE_DISPLAY);
return null;
}
catch(ConditionException exc) {
return exc.getNavigationString();
}
}
/**
* This is an action method that should be called by a button in the JSF
* page to cancel the creation of a new persistent bean
* (i.e. a persistent bean that was created in memory but not saved in
* persistent storage yet).
*
* A more detailed description of this action method can be found in the
* class description.
*
* @post 'return to previous page'
* @post (getInstance() == null)
* ? result.equals(NO_INSTANCE)
* : true;
* @post (getInstance() != null && !getViewMode().equals(VIEWMODE_EDITNEW))
* ? result.equals(INCORRECT_VIEWMODE)
* : true;
* @post (getInstance() != null && getViewMode().equals(VIEWMODE_EDITNEW))
* ? result.equals(CANCEL_EDITNEW)
* : true;
*/
public final String cancelEditNew() {
LOG.debug("PersistentBeanCrudHandler.cancelEditNew called; returning to previous page");
LOG.debug("persistentBean: " + getInstance());
if (getInstance() == null) {
return goBack(NO_INSTANCE);
}
if (!getViewMode().equals(VIEWMODE_EDITNEW)) {
return goBack(INCORRECT_VIEWMODE);
}
return goBack(CANCEL_EDITNEW);
}
}
|
package com.ociweb.pronghorn.image;
import java.util.Arrays;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.ociweb.pronghorn.image.schema.CalibrationStatusSchema;
import com.ociweb.pronghorn.image.schema.LocationModeSchema;
import com.ociweb.pronghorn.image.schema.ImageSchema;
import com.ociweb.pronghorn.pipe.ChannelReader;
import com.ociweb.pronghorn.pipe.ChannelWriter;
import com.ociweb.pronghorn.pipe.DataInputBlobReader;
import com.ociweb.pronghorn.pipe.DataOutputBlobWriter;
import com.ociweb.pronghorn.pipe.Pipe;
import com.ociweb.pronghorn.pipe.RawDataSchema;
import com.ociweb.pronghorn.pipe.RawDataSchemaUtil;
import com.ociweb.pronghorn.stage.PronghornStage;
import com.ociweb.pronghorn.stage.math.HistogramSchema;
import com.ociweb.pronghorn.stage.scheduling.GraphManager;
import com.ociweb.pronghorn.util.primitive.Lois;
import com.ociweb.pronghorn.util.primitive.LoisVisitor;
public class MapImageStage extends PronghornStage {
private static final int NO_DATA = -1;
private static final int SINGLE_BASE = -2;
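	//Sentinel encoding used by the imageLookup cells below (descriptive note):
	// NO_DATA (-1) marks a (row, column, quantized color) cell with no trained location;
	// a single trained location v is stored inline as the negative value SINGLE_BASE - v
	// and decoded back the same way; once a second location is learned the cell is
	// promoted to a Lois set and holds the non-negative set id instead.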
private int[] workspace;
private Lois locations;
private int [] imageLookup;
private int imageWidth;
private int imageHeight;
private transient int loadPosition = NO_DATA;
private transient int savePosition = NO_DATA;
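	// loadPosition/savePosition double as small state machines for streaming the map:
	// NO_DATA (-1) = idle, 0..imageLookup.length = next cell index to stream,
	// -2 = imageLookup finished but the Lois location data is still streaming,
	// and -3 (savePosition only) = the save has fully completed.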
private final Pipe<ImageSchema> imgInput;
private final Pipe<RawDataSchema> loadingMappingData;
private final Pipe<RawDataSchema> savingMappingData;
private final Pipe<LocationModeSchema> modeIn;
private final Pipe<CalibrationStatusSchema> statusOut;
private final Pipe<CalibrationStatusSchema> ack;
private final Pipe<HistogramSchema> output;
private boolean isShuttingDown = false;
private boolean loadingNewMap = false;
private boolean imageInProgress = false;
private int totalRows;
private int totalWidth;
private long time;
private int activeRow;
	//this provides for 64 colors, which helps with both
// * simplification of what needs to be seen
// * significant reduction in memory consumption
private int shiftColors = 2;
private int localDepth = 256 >> shiftColors;
private int minCycles = 12;
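	//Example: with shiftColors == 2 a raw byte 0..255 is quantized to 0..63
	//(e.g. 200 >> 2 == 50), so localDepth == 64 color buckets per pixel position.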
private LoisVisitor sumVisitor = new LoisVisitor() {
@Override
public boolean visit(int value) {
workspace[value]++;
return true;
}
};
private boolean hasDataSet = false;
private boolean isLearning = false;
private int activeLearningLocationBase;
	private int learningMaxSlices; //a cycle may use fewer slices but never more than this, or the location base would change
private int cycleStep;//total steps in this cycle
private static final Logger logger = LoggerFactory.getLogger(MapImageStage.class);
public static MapImageStage newInstance(GraphManager graphManager,
Pipe<ImageSchema> imgInput,
Pipe<LocationModeSchema> stateData,
Pipe<HistogramSchema> output,
Pipe<CalibrationStatusSchema> ack,
Pipe<CalibrationStatusSchema> done,
Pipe<RawDataSchema> loadingMappingData,
Pipe<RawDataSchema> savingMappingData,
String colorLabel
) {
return new MapImageStage(graphManager, imgInput, stateData, output, ack, done, loadingMappingData, savingMappingData, colorLabel);
}
//need outgoing schema for the map.
protected MapImageStage(GraphManager graphManager,
Pipe<ImageSchema> imgInput,
Pipe<LocationModeSchema> modeIn,
Pipe<HistogramSchema> output,
Pipe<CalibrationStatusSchema> ack,
Pipe<CalibrationStatusSchema> statusOut,
Pipe<RawDataSchema> loadingMappingData,
Pipe<RawDataSchema> savingMappingData,
String colorLabel
) {
super(graphManager, join(imgInput,loadingMappingData, modeIn, ack), join(output, savingMappingData, statusOut) );
this.imgInput = imgInput;
this.loadingMappingData = loadingMappingData;
this.savingMappingData = savingMappingData;
this.modeIn = modeIn;
this.output = output;
this.statusOut = statusOut;
this.ack = ack;
GraphManager.addNota(graphManager, GraphManager.DOT_RANK_NAME, "ModuleStage", this);
GraphManager.addNota(graphManager, GraphManager.STAGE_NAME, colorLabel, this);
}
@Override
public void startup() {
locations = new Lois();
}
@Override
public void run() {
	assert(savePosition!=-2 || loadPosition!=-2) : "Can only load or save, not both at the same time.";
//NOTE: if we are still saving the data do this first
if (savePosition == -2) {
if (locations.save(savingMappingData)) {
savePosition = -3;//done
} else {
return;
}
}
//NOTE: if we are still loading the data do this first.
if (loadPosition == -2) {
if (locations.load(loadingMappingData)) {
loadPosition = NO_DATA;//done
} else {
//need to try load again later
return;
}
}
if (!isShuttingDown) {
if (!imageInProgress) {
if (Pipe.hasContentToRead(loadingMappingData)) {
if (!load(loadingMappingData)) {
return;
}
}
if (Pipe.hasContentToRead(ack)) {
readAckData(ack);
}
if (Pipe.hasContentToRead(modeIn)) {
readModeData(modeIn);
}
}
//if we are not loading a new map check for an image to process
if (!loadingNewMap) {
//must have room to write results if we read any data.
while (Pipe.hasContentToRead(imgInput)
&& Pipe.hasRoomForWrite(output)
&& Pipe.hasRoomForWrite(statusOut)) {
int msgIdx = Pipe.takeMsgIdx(imgInput);
if (ImageSchema.MSG_FRAMECHUNK_2 == msgIdx) {
if (activeRow < totalRows) {
DataInputBlobReader<ImageSchema> rowData = Pipe.openInputStream(imgInput);
int rowBase = (imageWidth*localDepth)*activeRow++;
if (!isLearning) {
//normal location scanning
for(int activeColumn = 0; activeColumn<totalWidth; activeColumn++) {
int readByte = (0xFF&rowData.readByte())>>shiftColors;
int locationSetId = getLocationSetId(rowBase, activeColumn, readByte);
if (NO_DATA != locationSetId) {
if (locationSetId<0) {
//we have a single value so convert and match it
sumVisitor.visit(SINGLE_BASE - locationSetId);
} else {
locations.visitSet(locationSetId, sumVisitor );
}
}
}
if (activeRow == totalRows) {
if (hasDataSet) {
//only publish if is valid
publishHistogram();
}
finishedImageProcessing();
}
} else {
//learning
//given this root have we already seen this position recorded
	//if so we are done, send back the done status
if (cycleStep>minCycles && isCycleComplete(rowData, rowBase, activeLearningLocationBase, learningMaxSlices)) {
hasDataSet = true;
//send done status to see if the other actors agree
publishCycleDone(activeLearningLocationBase, cycleStep);
}
//generate new location id
int activeLocation = activeLearningLocationBase + cycleStep;
//learn this new location
for(int activeColumn = 0; activeColumn<totalWidth; activeColumn++) {
int readByte = (0xFF&rowData.readByte())>>shiftColors;
int locationSetId = getLocationSetId(rowBase, activeColumn, readByte);
if (NO_DATA != locationSetId) {
if (locationSetId<0) {
//we now have 2 values stored here so extract first and collect both
int firstValue = SINGLE_BASE-locationSetId;
locationSetId = locations.newSet();
locations.insert(locationSetId, firstValue);
setLocationSetId(rowBase, activeColumn, locationSetId, readByte);
}
locations.insert(locationSetId, activeLocation);
} else {
//store single value as negative until a second needs to be stored
setLocationSetId(rowBase, activeColumn, SINGLE_BASE-activeLocation, readByte);
}
}
if (activeRow == totalRows) {
	//ensure the step count stays under the max slice value so the location base is not disturbed.
if (++cycleStep >= learningMaxSlices) {
cycleStep = 0;
}
//no histogram to send..
finishedImageProcessing();
}
}
Pipe.confirmLowLevelRead(imgInput, Pipe.sizeOf(imgInput, msgIdx));
Pipe.releaseReadLock(imgInput);
} else {
//error too many rows.
logger.error("too many rows only expected {}",totalRows);
Pipe.skipNextFragment(imgInput, msgIdx);
}
} else if (ImageSchema.MSG_FRAMESTART_1 == msgIdx) {
imageInProgress = true;
totalWidth = Pipe.takeInt(imgInput);
totalRows = Pipe.takeInt(imgInput);
time = Pipe.takeLong(imgInput);
int frameBytes = Pipe.takeInt(imgInput);
int bitsPerPixel = Pipe.takeInt(imgInput);
ChannelReader reader = Pipe.openInputStream(imgInput);
if (null == workspace || imageWidth!=totalWidth || imageHeight!=totalRows) {
	int maxLocations = output.maxVarLen/ChannelReader.PACKED_INT_SIZE;
	initProcessing(totalWidth, totalRows, maxLocations);
}
//clear histogram totals
Arrays.fill(workspace, 0);
activeRow = 0;
Pipe.confirmLowLevelRead(imgInput, Pipe.sizeOf(imgInput, msgIdx));
Pipe.releaseReadLock(imgInput);
} else {
if (NO_DATA != msgIdx) {
throw new UnsupportedOperationException("Unexpected message idx of:"+msgIdx);
}
isShuttingDown = true;
Pipe.confirmLowLevelRead(imgInput, Pipe.EOF_SIZE);
Pipe.releaseReadLock(imgInput);
break;
}
}
}
} else {
if (savePosition==-3 || save(savingMappingData)) {
if (Pipe.hasRoomForWrite(output)) {
Pipe.publishEOF(output);
requestShutdown();
}
}
}
}
private void readModeData(Pipe<LocationModeSchema> pipe) {
while (Pipe.hasContentToRead(pipe)) {
int msgIdx = Pipe.takeMsgIdx(pipe);
switch (msgIdx) {
case LocationModeSchema.MSG_CYCLELEARNINGSTART_1:
activeLearningLocationBase = Pipe.takeInt(pipe);
learningMaxSlices = Pipe.takeInt(pipe);
isLearning = true;
cycleStep = 0;
break;
case LocationModeSchema.MSG_CYCLELEARNINGCANCEL_3:
isLearning = false;
int j = cycleStep;
while (--j>=0) {
locations.removeFromAll(activeLearningLocationBase+j);
}
break;
}
Pipe.confirmLowLevelRead(pipe, Pipe.sizeOf(pipe, msgIdx));
Pipe.releaseReadLock(pipe);
}
}
private void readAckData(Pipe<CalibrationStatusSchema> pipe) {
while (Pipe.hasContentToRead(pipe)) {
int msgIdx = Pipe.takeMsgIdx(pipe);
switch (msgIdx) {
case CalibrationStatusSchema.MSG_CYCLECALIBRATED_1:
isLearning = false;
final int locationBase = Pipe.takeInt(pipe);
assert(activeLearningLocationBase == locationBase) : "Completed message did not match location for learning start";
final int totalSteps = Pipe.takeInt(pipe);
//un-learn the steps after the point where all agreed.
int i = cycleStep;
while (--i>=totalSteps) {
locations.removeFromAll(locationBase+i);
}
break;
}
Pipe.confirmLowLevelRead(pipe, Pipe.sizeOf(pipe, msgIdx));
Pipe.releaseReadLock(pipe);
}
}
private void publishCycleDone(int activeLearningLocationBase, int cycleStep) {
Pipe.presumeRoomForWrite(statusOut);
int size = Pipe.addMsgIdx(statusOut, CalibrationStatusSchema.MSG_CYCLECALIBRATED_1);
Pipe.addIntValue(activeLearningLocationBase, statusOut);
Pipe.addIntValue(cycleStep, statusOut);
Pipe.confirmLowLevelWrite(statusOut, size);
Pipe.publishWrites(statusOut);
}
private boolean isCycleComplete(DataInputBlobReader<ImageSchema> rowData, int rowBase,
int activeLearningLocationBase, int learningMaxSlices) {
boolean isLoopCompleted = false;
int endValue = activeLearningLocationBase+learningMaxSlices;
//logger.info("checking for cycle complete looking between {} and {}", activeLearningLocationBase, endValue);
int totalMatches = 0;
int countLimit = (totalWidth*3)/4;
//logger.info("looking for {} matches in this row of {}", countLimit, totalWidth );
for(int activeColumn = 0; activeColumn<totalWidth; activeColumn++) {
	int readByte = (0xFF&rowData.readByte())>>shiftColors;
int locationSetId = getLocationSetId(rowBase, activeColumn, readByte);
if (NO_DATA != locationSetId) {
if (locationSetId<0) {
//we have just 1 value so we check it
int value = (SINGLE_BASE-locationSetId);
//logger.info("looking at single value {}", value);
if ((value>=activeLearningLocationBase) && (value<endValue)) {
if (isLoopCompleted=(++totalMatches>countLimit)) {
break;
}
}
} else {
//logger.info("looking into range in a set");
if (locations.containsAny(locationSetId,
activeLearningLocationBase, endValue)) {
if (isLoopCompleted=(++totalMatches>countLimit)) {
break;
}
}
}
	}
	//else: no locations have been trained at this position
	}
logger.info("found only {} total matches of {} but must have {} for {} ", totalMatches, totalWidth, countLimit, toString());
return isLoopCompleted;
}
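	// imageLookup is a flattened [row][column][quantized color] table:
	// cell index = rowBase + activeColumn*localDepth + readByte, where
	// rowBase = activeRow * imageWidth * localDepth (see the frame-processing loop above).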
private int getLocationSetId(int rowBase, int activeColumn, int readByte) {
assert(rowBase>=0);
assert(activeColumn>=0);
assert(localDepth>=0);
return imageLookup[
rowBase
+(activeColumn*localDepth)
+readByte];
}
private void setLocationSetId(int rowBase, int activeColumn, int newId, int readByte) {
assert(rowBase>=0);
assert(activeColumn>=0);
assert(localDepth>=0);
imageLookup[ rowBase
+(activeColumn*localDepth)
+readByte] = newId;
}
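	// On-pipe persistence format shared by save() and load() (a descriptive sketch of the
	// code below): a header of four packed ints -- imageWidth, imageHeight, shiftColors,
	// workspace.length -- then every imageLookup cell as a packed int, and finally the
	// Lois location sets written/read via Lois.save(pipe)/Lois.load(pipe).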
public boolean save(Pipe<RawDataSchema> pipe) {
	assert (pipe.maxVarLen>=(ChannelReader.PACKED_INT_SIZE*4)) : "Pipes must hold longer messages to write this content";
while (Pipe.hasRoomForWrite(pipe)) {
int size = Pipe.addMsgIdx(pipe, RawDataSchema.MSG_CHUNKEDSTREAM_1);
ChannelWriter writer = Pipe.openOutputStream(pipe);
if (savePosition==NO_DATA) { //new file
writer.writePackedInt(imageWidth);
writer.writePackedInt(imageHeight);
writer.writePackedInt(shiftColors);
writer.writePackedInt(workspace.length); //locations
savePosition = 0;
}
while (savePosition<imageLookup.length && writer.remaining()>=ChannelReader.PACKED_INT_SIZE) {
writer.writePackedInt(imageLookup[savePosition++]);
}
writer.closeLowLevelField();
Pipe.confirmLowLevelWrite(pipe, size);
Pipe.publishWrites(pipe);
if (savePosition==imageLookup.length) {
savePosition = -2;
boolean result = locations.save(pipe); //if in this state keep calling.
if (result) {
	savePosition = -3;//done saving
}
return result;
}
}
return false;
}
private boolean load(Pipe<RawDataSchema> pipe) {
while (Pipe.hasContentToRead(pipe)) {
boolean isEnd = RawDataSchemaUtil.accumulateInputStream(pipe);
ChannelReader reader = Pipe.inputStream(pipe);
int startingAvailable = reader.available();
if (loadPosition == NO_DATA) {
//note this value here forces us to keep init at 16 and min block at 4
if (reader.available() < (ChannelReader.PACKED_INT_SIZE*4)) {
return false;//not enough data yet to read header cleanly
}
//load all the fixed constants here
int width = reader.readPackedInt();
int height = reader.readPackedInt();
shiftColors = reader.readPackedInt();
localDepth = 256>>shiftColors;
int locations = reader.readPackedInt();//max location value+1
initProcessing(width, height, locations);
}
while ( ((reader.available() >= ChannelReader.PACKED_INT_SIZE) || isEnd)
&& loadPosition<imageLookup.length ) {
imageLookup[loadPosition++] = reader.readPackedInt();
}
RawDataSchemaUtil.releaseConsumed(pipe, reader, startingAvailable);
if (loadPosition == imageLookup.length) {
loadPosition = -2;
boolean result = locations.load(pipe); //if in this state keep calling.
if (result) {
loadPosition = NO_DATA;//done
hasDataSet = true;
}
return result;
}
}
return false;
}
private void initProcessing(int width, int height, int locations) {
if (null == workspace || workspace.length != locations) {
workspace = new int[locations];
}
imageWidth = width;
imageHeight = height;
final int imageLookupLength = width*height*localDepth;
//init the image matrix as needed
if (null == imageLookup || imageLookup.length != imageLookupLength) {
imageLookup = new int[imageLookupLength];
Arrays.fill(imageLookup, NO_DATA);//this is a marker for no data
}
loadPosition = 0;
}
private void publishHistogram() {
Pipe.presumeRoomForWrite(output);
int size = Pipe.addMsgIdx(output, HistogramSchema.MSG_HISTOGRAM_1);
Pipe.addIntValue(workspace.length, output);
DataOutputBlobWriter<HistogramSchema> outputStream = Pipe.openOutputStream(output);
int i = workspace.length;
while (--i>=0) {
outputStream.writePackedInt(workspace[i]);
}
DataOutputBlobWriter.closeLowLevelField(outputStream);
Pipe.confirmLowLevelWrite(output, size);
Pipe.publishWrites(output);
}
private void finishedImageProcessing() {
	totalRows = 0;//reset; this frame has been fully processed.
imageInProgress = false;
}
}
|
package com.ippon.jug.slip;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import java.io.IOException;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.nio.file.Files;
import java.nio.file.Paths;
@RestController
public class SlipController {
@RequestMapping(value = "/request")
public String getRequest() throws InterruptedException, UnknownHostException {
this.doSlip();
StringBuilder builder = new StringBuilder()
.append("Hello I'm ")
.append(InetAddress.getLocalHost().getHostName())
.append(". My little secret is ... ");
try {
Files.readAllLines(Paths.get("/run/secrets/bdx"))
.forEach(builder::append);
} catch (IOException e) {
// No secret here
builder.append(" UNKNOWN !");
}
return builder.toString();
}
private static final int SLEEP_MILLIS = 50;
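    // Note (assumption about intent): doSlip() is synchronized on this controller, which Spring
    // treats as a singleton by default, so concurrent requests are serialized through the 50 ms
    // sleep, capping throughput at roughly 1000 / 50 = 20 requests per second per instance --
    // presumably the deliberate bottleneck this demo is meant to expose.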
private synchronized void doSlip() throws InterruptedException {
Thread.sleep(SLEEP_MILLIS);
}
@RequestMapping(value = "/dockerHealth")
public ResponseEntity<Void> getHealth() {
return new ResponseEntity<Void>(HttpStatus.OK);
}
}
|
package org.wildfly.extension.undertow;
import static org.jboss.as.controller.PersistentResourceXMLDescription.builder;
import java.util.List;
import org.jboss.as.controller.PathAddress;
import org.jboss.as.controller.PersistentResourceDefinition;
import org.jboss.as.controller.PersistentResourceXMLDescription;
import org.jboss.as.controller.PersistentResourceXMLParser;
import org.jboss.as.controller.operations.common.Util;
import org.jboss.dmr.ModelNode;
import org.wildfly.extension.undertow.filters.CustomFilterDefinition;
import org.wildfly.extension.undertow.filters.ErrorPageDefinition;
import org.wildfly.extension.undertow.filters.ExpressionFilterDefinition;
import org.wildfly.extension.undertow.filters.FilterDefinitions;
import org.wildfly.extension.undertow.filters.FilterRefDefinition;
import org.wildfly.extension.undertow.filters.GzipFilter;
import org.wildfly.extension.undertow.filters.ModClusterDefinition;
import org.wildfly.extension.undertow.filters.RequestLimitHandler;
import org.wildfly.extension.undertow.filters.ResponseHeaderFilter;
import org.wildfly.extension.undertow.filters.RewriteFilterDefinition;
import org.wildfly.extension.undertow.handlers.FileHandler;
import org.wildfly.extension.undertow.handlers.HandlerDefinitions;
import org.wildfly.extension.undertow.handlers.ReverseProxyHandler;
import org.wildfly.extension.undertow.handlers.ReverseProxyHandlerHost;
public class UndertowSubsystemParser_3_1 extends PersistentResourceXMLParser {
protected static final UndertowSubsystemParser_3_1 INSTANCE = new UndertowSubsystemParser_3_1();
private static final PersistentResourceXMLDescription xmlDescription;
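    // Rough sketch (illustrative only; element names abridged) of the subsystem XML shape
    // that this description maps onto the management model:
    //   <subsystem xmlns="urn:jboss:domain:undertow:3.1">
    //     <server name="default-server">
    //       <http-listener name="default" socket-binding="http"/>
    //       <host name="default-host" alias="localhost"/>
    //     </server>
    //     <servlet-container name="default"/>
    //     <handlers/>
    //     <filters/>
    //   </subsystem>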
static {
xmlDescription = builder(UndertowRootDefinition.INSTANCE, Namespace.UNDERTOW_3_1.getUriString())
.addAttributes(
UndertowRootDefinition.DEFAULT_SERVER,
UndertowRootDefinition.DEFAULT_VIRTUAL_HOST,
UndertowRootDefinition.DEFAULT_SERVLET_CONTAINER,
UndertowRootDefinition.INSTANCE_ID,
UndertowRootDefinition.DEFAULT_SECURITY_DOMAIN,
UndertowRootDefinition.STATISTICS_ENABLED)
.addChild(
builder(BufferCacheDefinition.INSTANCE)
.addAttributes(BufferCacheDefinition.BUFFER_SIZE, BufferCacheDefinition.BUFFERS_PER_REGION, BufferCacheDefinition.MAX_REGIONS)
)
.addChild(builder(ServerDefinition.INSTANCE)
.addAttributes(ServerDefinition.DEFAULT_HOST, ServerDefinition.SERVLET_CONTAINER)
.addChild(
listenerBuilder(AjpListenerResourceDefinition.INSTANCE)
// xsd ajp-listener-type
.addAttributes(AjpListenerResourceDefinition.SCHEME,
ListenerResourceDefinition.REDIRECT_SOCKET,
AjpListenerResourceDefinition.MAX_AJP_PACKET_SIZE)
)
.addChild(
listenerBuilder(HttpListenerResourceDefinition.INSTANCE)
// xsd http-listener-type
.addAttributes(
HttpListenerResourceDefinition.CERTIFICATE_FORWARDING,
ListenerResourceDefinition.REDIRECT_SOCKET,
HttpListenerResourceDefinition.PROXY_ADDRESS_FORWARDING,
HttpListenerResourceDefinition.ENABLE_HTTP2,
HttpListenerResourceDefinition.HTTP2_ENABLE_PUSH,
HttpListenerResourceDefinition.HTTP2_HEADER_TABLE_SIZE,
HttpListenerResourceDefinition.HTTP2_INITIAL_WINDOW_SIZE,
HttpListenerResourceDefinition.HTTP2_MAX_CONCURRENT_STREAMS,
HttpListenerResourceDefinition.HTTP2_MAX_FRAME_SIZE,
HttpListenerResourceDefinition.HTTP2_MAX_HEADER_LIST_SIZE)
).addChild(
listenerBuilder(HttpsListenerResourceDefinition.INSTANCE)
// xsd https-listener-type
.addAttributes(
HttpsListenerResourceDefinition.SECURITY_REALM,
HttpsListenerResourceDefinition.VERIFY_CLIENT,
HttpsListenerResourceDefinition.ENABLED_CIPHER_SUITES,
HttpsListenerResourceDefinition.ENABLED_PROTOCOLS,
HttpsListenerResourceDefinition.ENABLE_HTTP2,
HttpsListenerResourceDefinition.ENABLE_SPDY,
HttpsListenerResourceDefinition.SSL_SESSION_CACHE_SIZE,
HttpsListenerResourceDefinition.SSL_SESSION_TIMEOUT,
HttpListenerResourceDefinition.HTTP2_ENABLE_PUSH,
HttpListenerResourceDefinition.HTTP2_HEADER_TABLE_SIZE,
HttpListenerResourceDefinition.HTTP2_INITIAL_WINDOW_SIZE,
HttpListenerResourceDefinition.HTTP2_MAX_CONCURRENT_STREAMS,
HttpListenerResourceDefinition.HTTP2_MAX_FRAME_SIZE,
HttpListenerResourceDefinition.HTTP2_MAX_HEADER_LIST_SIZE)
).addChild(
builder(HostDefinition.INSTANCE)
.addAttributes(HostDefinition.ALIAS, HostDefinition.DEFAULT_WEB_MODULE, HostDefinition.DEFAULT_RESPONSE_CODE, HostDefinition.DISABLE_CONSOLE_REDIRECT)
.addChild(
builder(LocationDefinition.INSTANCE)
.addAttributes(LocationDefinition.HANDLER)
.addChild(filterRefBuilder())
).addChild(
builder(AccessLogDefinition.INSTANCE)
.addAttributes(
AccessLogDefinition.PATTERN,
AccessLogDefinition.WORKER,
AccessLogDefinition.DIRECTORY,
AccessLogDefinition.RELATIVE_TO,
AccessLogDefinition.PREFIX,
AccessLogDefinition.SUFFIX,
AccessLogDefinition.ROTATE,
AccessLogDefinition.USE_SERVER_LOG,
AccessLogDefinition.EXTENDED,
AccessLogDefinition.PREDICATE)
).addChild(filterRefBuilder())
.addChild(
builder(SingleSignOnDefinition.INSTANCE)
.addAttributes(SingleSignOnDefinition.DOMAIN, SingleSignOnDefinition.PATH, SingleSignOnDefinition.HTTP_ONLY, SingleSignOnDefinition.SECURE, SingleSignOnDefinition.COOKIE_NAME)
)
)
)
.addChild(
builder(ServletContainerDefinition.INSTANCE)
.addAttribute(ServletContainerDefinition.ALLOW_NON_STANDARD_WRAPPERS)
.addAttribute(ServletContainerDefinition.DEFAULT_BUFFER_CACHE)
.addAttribute(ServletContainerDefinition.STACK_TRACE_ON_ERROR)
.addAttribute(ServletContainerDefinition.USE_LISTENER_ENCODING)
.addAttribute(ServletContainerDefinition.DEFAULT_ENCODING)
.addAttribute(ServletContainerDefinition.IGNORE_FLUSH)
.addAttribute(ServletContainerDefinition.EAGER_FILTER_INIT)
.addAttribute(ServletContainerDefinition.DEFAULT_SESSION_TIMEOUT)
.addAttribute(ServletContainerDefinition.DISABLE_CACHING_FOR_SECURED_PAGES)
.addAttribute(ServletContainerDefinition.DIRECTORY_LISTING)
.addAttribute(ServletContainerDefinition.PROACTIVE_AUTHENTICATION)
.addAttribute(ServletContainerDefinition.SESSION_ID_LENGTH)
.addAttribute(ServletContainerDefinition.MAX_SESSIONS)
.addChild(
builder(JspDefinition.INSTANCE)
.setXmlElementName(Constants.JSP_CONFIG)
.addAttributes(
JspDefinition.DISABLED,
JspDefinition.DEVELOPMENT,
JspDefinition.KEEP_GENERATED,
JspDefinition.TRIM_SPACES,
JspDefinition.TAG_POOLING,
JspDefinition.MAPPED_FILE,
JspDefinition.CHECK_INTERVAL,
JspDefinition.MODIFICATION_TEST_INTERVAL,
JspDefinition.RECOMPILE_ON_FAIL,
JspDefinition.SMAP,
JspDefinition.DUMP_SMAP,
JspDefinition.GENERATE_STRINGS_AS_CHAR_ARRAYS,
JspDefinition.ERROR_ON_USE_BEAN_INVALID_CLASS_ATTRIBUTE,
JspDefinition.SCRATCH_DIR,
JspDefinition.SOURCE_VM,
JspDefinition.TARGET_VM,
JspDefinition.JAVA_ENCODING,
JspDefinition.X_POWERED_BY,
JspDefinition.DISPLAY_SOURCE_FRAGMENT,
JspDefinition.OPTIMIZE_SCRIPTLETS)
)
.addChild(
builder(SessionCookieDefinition.INSTANCE)
.addAttributes(
SessionCookieDefinition.NAME,
SessionCookieDefinition.DOMAIN,
SessionCookieDefinition.COMMENT,
SessionCookieDefinition.HTTP_ONLY,
SessionCookieDefinition.SECURE,
SessionCookieDefinition.MAX_AGE
)
)
.addChild(
builder(PersistentSessionsDefinition.INSTANCE)
.addAttributes(
PersistentSessionsDefinition.PATH,
PersistentSessionsDefinition.RELATIVE_TO
)
)
.addChild(
builder(WebsocketsDefinition.INSTANCE)
.addAttributes(
WebsocketsDefinition.WORKER,
WebsocketsDefinition.BUFFER_POOL,
WebsocketsDefinition.DISPATCH_TO_WORKER
)
)
.addChild(builder(MimeMappingDefinition.INSTANCE)
.setXmlWrapperElement("mime-mappings")
.addAttributes(
MimeMappingDefinition.VALUE
))
.addChild(builder(WelcomeFileDefinition.INSTANCE).setXmlWrapperElement("welcome-files"))
.addChild(builder(CrawlerSessionManagementDefinition.INSTANCE)
.addAttributes(CrawlerSessionManagementDefinition.USER_AGENTS, CrawlerSessionManagementDefinition.SESSION_TIMEOUT))
)
.addChild(
builder(HandlerDefinitions.INSTANCE)
.setXmlElementName(Constants.HANDLERS)
.setNoAddOperation(true)
.addChild(
builder(FileHandler.INSTANCE)
.addAttributes(
FileHandler.PATH,
FileHandler.CACHE_BUFFER_SIZE,
FileHandler.CACHE_BUFFERS,
FileHandler.DIRECTORY_LISTING,
FileHandler.FOLLOW_SYMLINK,
FileHandler.SAFE_SYMLINK_PATHS,
FileHandler.CASE_SENSITIVE
)
)
.addChild(
builder(ReverseProxyHandler.INSTANCE)
.addAttributes(
ReverseProxyHandler.CONNECTIONS_PER_THREAD,
ReverseProxyHandler.SESSION_COOKIE_NAMES,
ReverseProxyHandler.PROBLEM_SERVER_RETRY,
ReverseProxyHandler.MAX_REQUEST_TIME,
ReverseProxyHandler.REQUEST_QUEUE_SIZE,
ReverseProxyHandler.CACHED_CONNECTIONS_PER_THREAD,
ReverseProxyHandler.CONNECTION_IDLE_TIMEOUT)
.addChild(builder(ReverseProxyHandlerHost.INSTANCE)
.setXmlElementName(Constants.HOST)
.addAttributes(ReverseProxyHandlerHost.INSTANCE_ID, ReverseProxyHandlerHost.PATH, ReverseProxyHandlerHost.SCHEME, ReverseProxyHandlerHost.OUTBOUND_SOCKET_BINDING, ReverseProxyHandlerHost.SECURITY_REALM))
)
)
.addChild(
builder(FilterDefinitions.INSTANCE)
.setXmlElementName(Constants.FILTERS)
.setNoAddOperation(true)
.addChild(
builder(RequestLimitHandler.INSTANCE)
.addAttributes(RequestLimitHandler.MAX_CONCURRENT_REQUESTS, RequestLimitHandler.QUEUE_SIZE)
).addChild(
builder(ResponseHeaderFilter.INSTANCE)
.addAttributes(ResponseHeaderFilter.NAME, ResponseHeaderFilter.VALUE)
).addChild(
builder(GzipFilter.INSTANCE)
).addChild(
builder(ErrorPageDefinition.INSTANCE)
.addAttributes(ErrorPageDefinition.CODE, ErrorPageDefinition.PATH)
).addChild(
builder(ModClusterDefinition.INSTANCE)
.addAttributes(ModClusterDefinition.MANAGEMENT_SOCKET_BINDING,
ModClusterDefinition.ADVERTISE_SOCKET_BINDING,
ModClusterDefinition.SECURITY_KEY,
ModClusterDefinition.ADVERTISE_PROTOCOL,
ModClusterDefinition.ADVERTISE_PATH,
ModClusterDefinition.ADVERTISE_FREQUENCY,
ModClusterDefinition.HEALTH_CHECK_INTERVAL,
ModClusterDefinition.BROKEN_NODE_TIMEOUT,
ModClusterDefinition.WORKER,
ModClusterDefinition.MAX_REQUEST_TIME,
ModClusterDefinition.MANAGEMENT_ACCESS_PREDICATE,
ModClusterDefinition.CONNECTIONS_PER_THREAD,
ModClusterDefinition.CACHED_CONNECTIONS_PER_THREAD,
ModClusterDefinition.CONNECTION_IDLE_TIMEOUT,
ModClusterDefinition.REQUEST_QUEUE_SIZE,
ModClusterDefinition.SECURITY_REALM,
ModClusterDefinition.USE_ALIAS,
ModClusterDefinition.ENABLE_HTTP2,
ModClusterDefinition.MAX_AJP_PACKET_SIZE,
ModClusterDefinition.HTTP2_ENABLE_PUSH,
ModClusterDefinition.HTTP2_HEADER_TABLE_SIZE,
ModClusterDefinition.HTTP2_INITIAL_WINDOW_SIZE,
ModClusterDefinition.HTTP2_MAX_CONCURRENT_STREAMS,
ModClusterDefinition.HTTP2_MAX_HEADER_LIST_SIZE,
ModClusterDefinition.HTTP2_MAX_FRAME_SIZE)
).addChild(
builder(CustomFilterDefinition.INSTANCE)
.addAttributes(CustomFilterDefinition.CLASS_NAME, CustomFilterDefinition.MODULE, CustomFilterDefinition.PARAMETERS)
.setXmlElementName("filter")
).addChild(
builder(ExpressionFilterDefinition.INSTANCE)
.addAttributes(ExpressionFilterDefinition.EXPRESSION, ExpressionFilterDefinition.MODULE)
).addChild(
builder(RewriteFilterDefinition.INSTANCE)
.addAttributes(RewriteFilterDefinition.TARGET, RewriteFilterDefinition.REDIRECT)
)
)
//here to make sure we always add filters & handlers path to mgmt model
.setAdditionalOperationsGenerator(new PersistentResourceXMLDescription.AdditionalOperationsGenerator() {
@Override
public void additionalOperations(final PathAddress address, final ModelNode addOperation, final List<ModelNode> operations) {
operations.add(Util.createAddOperation(address.append(UndertowExtension.PATH_FILTERS)));
operations.add(Util.createAddOperation(address.append(UndertowExtension.PATH_HANDLERS)));
}
})
.build();
}
private UndertowSubsystemParser_3_1() {
}
@Override
public PersistentResourceXMLDescription getParserDescription() {
return xmlDescription;
}
/** Registers attributes common across listener types */
private static PersistentResourceXMLDescription.PersistentResourceXMLBuilder listenerBuilder(PersistentResourceDefinition resource) {
return builder(resource)
// xsd socket-optionsType
.addAttributes(
ListenerResourceDefinition.RECEIVE_BUFFER,
ListenerResourceDefinition.SEND_BUFFER,
ListenerResourceDefinition.BACKLOG,
ListenerResourceDefinition.KEEP_ALIVE,
ListenerResourceDefinition.READ_TIMEOUT,
ListenerResourceDefinition.WRITE_TIMEOUT,
ListenerResourceDefinition.MAX_CONNECTIONS)
// xsd listener-type
.addAttributes(
ListenerResourceDefinition.SOCKET_BINDING,
ListenerResourceDefinition.WORKER,
ListenerResourceDefinition.BUFFER_POOL,
ListenerResourceDefinition.ENABLED,
ListenerResourceDefinition.RESOLVE_PEER_ADDRESS,
ListenerResourceDefinition.MAX_ENTITY_SIZE,
ListenerResourceDefinition.BUFFER_PIPELINED_DATA,
ListenerResourceDefinition.MAX_HEADER_SIZE,
ListenerResourceDefinition.MAX_PARAMETERS,
ListenerResourceDefinition.MAX_HEADERS,
ListenerResourceDefinition.MAX_COOKIES,
ListenerResourceDefinition.ALLOW_ENCODED_SLASH,
ListenerResourceDefinition.DECODE_URL,
ListenerResourceDefinition.URL_CHARSET,
ListenerResourceDefinition.ALWAYS_SET_KEEP_ALIVE,
ListenerResourceDefinition.MAX_BUFFERED_REQUEST_SIZE,
ListenerResourceDefinition.RECORD_REQUEST_START_TIME,
ListenerResourceDefinition.ALLOW_EQUALS_IN_COOKIE_VALUE,
ListenerResourceDefinition.NO_REQUEST_TIMEOUT,
ListenerResourceDefinition.REQUEST_PARSE_TIMEOUT,
ListenerResourceDefinition.DISALLOWED_METHODS,
ListenerResourceDefinition.SECURE);
}
private static PersistentResourceXMLDescription.PersistentResourceXMLBuilder filterRefBuilder() {
return builder(FilterRefDefinition.INSTANCE)
.addAttributes(FilterRefDefinition.PREDICATE, FilterRefDefinition.PRIORITY);
}
}
|
package com.sop4j.dbutils;
import java.lang.reflect.Field;
import java.sql.Connection;
import java.util.HashMap;
import java.util.Map;
import javax.persistence.Column;
import javax.persistence.Entity;
/**
* An abstract class that makes it easier to build EntityExecutors.
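 * <p>
 * A usage sketch, for illustration only (the concrete executor subclass and entity
 * below are hypothetical; this class only defines the shared bind support):
 * </p>
 * <pre>
 * new InsertEntityExecutor<Person>(Person.class, connection)
 *     .bind("firstName", "Ada")
 *     .bind("lastName", "Lovelace")
 *     .execute();
 * </pre>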
*/
public abstract class AbstractEntityExecutor<T extends AbstractEntityExecutor<T>> {
protected final Class<?> entity;
protected final Connection conn;
protected final String tableName;
protected final Map<String, Object> params = new HashMap<String, Object>();
    /**
     * Constructs the EntityExecutor.
     *
     * @param entity the entity class, which must carry the {@link Entity} annotation.
     * @param conn the connection to execute against.
     */
AbstractEntityExecutor(final Class<?> entity, final Connection conn) {
this.entity = entity;
this.conn = conn;
final Entity annotation = entity.getAnnotation(Entity.class);
if(annotation == null) {
throw new IllegalArgumentException(entity.getName() + " does not have the Entity annotation");
}
// get the table's name
tableName = EntityUtils.getTableName(entity);
}
/**
* Bind properties to values before execution.
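     * <p>
     * The property may be given either as the Java field name or as the name declared in a
     * <code>Column</code> annotation; for example (illustrative names), <code>bind("firstName", "Ada")</code>
     * and <code>bind("first_name", "Ada")</code> both match a field annotated with
     * <code>@Column(name = "first_name")</code>.
     * </p>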
* @param property the property to bind.
* @param value the value to bind.
* @return this.
*/
    @SuppressWarnings("unchecked") // the cast to the self-type T is safe for properly parameterized subclasses
    public T bind(final String property, final Object value) {
if(property == null || property.length() == 0) {
throw new IllegalArgumentException("Property is null or blank string");
}
boolean found = false;
for(Field field:entity.getDeclaredFields()) {
// check to see if the property is a field
if(field.getName().equals(property)) {
params.put(property, value);
found = true;
break;
} else {
// we also check the column name
final Column column = field.getAnnotation(Column.class);
if(column != null && column.name().equals(property)) {
params.put(property, value);
found = true;
break;
}
}
}
if(!found) {
throw new IllegalArgumentException(property + " is not a property of the entity " + entity.getName());
}
return (T)this;
}
}
|
package com.intellij.compiler.impl;
import com.intellij.CommonBundle;
import com.intellij.analysis.AnalysisScope;
import com.intellij.compiler.*;
import com.intellij.compiler.make.CacheCorruptedException;
import com.intellij.compiler.make.DependencyCache;
import com.intellij.compiler.make.MakeUtil;
import com.intellij.compiler.progress.CompilerTask;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ModalityState;
import com.intellij.openapi.compiler.*;
import com.intellij.openapi.compiler.Compiler;
import com.intellij.openapi.compiler.ex.CompilerPathsEx;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.extensions.Extensions;
import com.intellij.openapi.fileEditor.FileDocumentManager;
import com.intellij.openapi.fileTypes.FileTypeManager;
import com.intellij.openapi.fileTypes.StdFileTypes;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.ModuleManager;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.projectRoots.ProjectJdk;
import com.intellij.openapi.roots.*;
import com.intellij.openapi.roots.ex.ProjectRootManagerEx;
import com.intellij.openapi.roots.ui.configuration.ClasspathEditor;
import com.intellij.openapi.roots.ui.configuration.ContentEntriesEditor;
import com.intellij.openapi.roots.ui.configuration.ModulesConfigurator;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.util.*;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.vfs.LocalFileSystem;
import com.intellij.openapi.vfs.VfsUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.vfs.VirtualFileManager;
import com.intellij.openapi.vfs.newvfs.RefreshQueue;
import com.intellij.openapi.wm.StatusBar;
import com.intellij.openapi.wm.WindowManager;
import com.intellij.packageDependencies.DependenciesBuilder;
import com.intellij.packageDependencies.ForwardDependenciesBuilder;
import com.intellij.pom.java.LanguageLevel;
import com.intellij.psi.PsiCompiledElement;
import com.intellij.psi.PsiDocumentManager;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiManager;
import com.intellij.util.Chunk;
import com.intellij.util.LocalTimeCounter;
import com.intellij.util.ProfilingUtil;
import com.intellij.util.StringBuilderSpinAllocator;
import gnu.trove.THashSet;
import org.jetbrains.annotations.NonNls;
import java.io.*;
import java.util.*;
public class CompileDriver {
private static final Logger LOG = Logger.getInstance("#com.intellij.compiler.impl.CompileDriver");
private final Project myProject;
private final Map<Compiler, Object> myCompilerToCacheMap = new HashMap<Compiler, Object>();
private Map<Pair<GeneratingCompiler, Module>, Pair<VirtualFile, VirtualFile>> myGenerationCompilerModuleToOutputDirMap; // [Compiler, Module] -> [ProductionSources, TestSources]
private String myCachesDirectoryPath;
private boolean myShouldClearOutputDirectory;
private Map<Module, String> myModuleOutputPaths = new HashMap<Module, String>();
private Map<Module, String> myModuleTestOutputPaths = new HashMap<Module, String>();
private ProjectRootManager myProjectRootManager;
private static final @NonNls String VERSION_FILE_NAME = "version.dat";
private static final @NonNls String LOCK_FILE_NAME = "in_progress.dat";
private static final @NonNls boolean GENERATE_CLASSPATH_INDEX = "true".equals(System.getProperty("generate.classpath.index"));
private final FileProcessingCompilerAdapterFactory myProcessingCompilerAdapterFactory;
private final FileProcessingCompilerAdapterFactory myPackagingCompilerAdapterFactory;
private final FileProcessingCompilerAdapterFactory myFixedTimestampCompilerAdapterFactory;
public CompileDriver(Project project) {
myProject = project;
myCachesDirectoryPath = CompilerPaths.getCacheStoreDirectory(myProject).getPath().replace('/', File.separatorChar);
myShouldClearOutputDirectory = CompilerWorkspaceConfiguration.getInstance(myProject).CLEAR_OUTPUT_DIRECTORY;
myGenerationCompilerModuleToOutputDirMap = new com.intellij.util.containers.HashMap<Pair<GeneratingCompiler, Module>, Pair<VirtualFile, VirtualFile>>();
final GeneratingCompiler[] generatingCompilers = CompilerManager.getInstance(myProject).getCompilers(GeneratingCompiler.class);
if (generatingCompilers.length > 0) {
final Module[] allModules = ModuleManager.getInstance(myProject).getModules();
for (GeneratingCompiler compiler : generatingCompilers) {
for (final Module module : allModules) {
final VirtualFile productionOutput = lookupVFile(compiler, module, false);
final VirtualFile testOutput = lookupVFile(compiler, module, true);
final Pair<GeneratingCompiler, Module> pair = new Pair<GeneratingCompiler, Module>(compiler, module);
final Pair<VirtualFile, VirtualFile> outputs = new Pair<VirtualFile, VirtualFile>(productionOutput, testOutput);
myGenerationCompilerModuleToOutputDirMap.put(pair, outputs);
}
}
}
myProjectRootManager = ProjectRootManager.getInstance(myProject);
myProcessingCompilerAdapterFactory = new FileProcessingCompilerAdapterFactory() {
public FileProcessingCompilerAdapter create(CompileContext context, FileProcessingCompiler compiler) {
return new FileProcessingCompilerAdapter(context, compiler);
}
};
myPackagingCompilerAdapterFactory = new FileProcessingCompilerAdapterFactory() {
public FileProcessingCompilerAdapter create(CompileContext context, FileProcessingCompiler compiler) {
return new PackagingCompilerAdapter(context, (PackagingCompiler)compiler);
}
};
myFixedTimestampCompilerAdapterFactory = new FileProcessingCompilerAdapterFactory() {
public FileProcessingCompilerAdapter create(CompileContext context, FileProcessingCompiler compiler) {
return new FixedTimestampCompilerAdapter(context, compiler);
}
};
}
public void rebuild(CompileStatusNotification callback) {
doRebuild(callback, null, true, addAdditionalRoots(new ProjectCompileScope(myProject)));
}
public void make(CompileScope scope, CompileStatusNotification callback) {
scope = addAdditionalRoots(scope);
if (validateCompilerConfiguration(scope, false)) {
startup(scope, false, false, callback, null, true, false);
}
}
public boolean isUpToDate(CompileScope scope) {
if (LOG.isDebugEnabled()) {
LOG.debug("isUpToDate operation started");
}
scope = addAdditionalRoots(scope);
final CompilerTask task = new CompilerTask(myProject, true, "", true);
final CompileContextImpl compileContext =
new CompileContextImpl(myProject, task, scope, new DependencyCache(myCachesDirectoryPath), this, true);
checkCachesVersion(compileContext);
if (compileContext.isRebuildRequested()) {
if (LOG.isDebugEnabled()) {
LOG.debug("Rebuild requested, up-to-date=false");
}
return false;
}
for (Pair<GeneratingCompiler, Module> pair : myGenerationCompilerModuleToOutputDirMap.keySet()) {
final Pair<VirtualFile, VirtualFile> outputs = myGenerationCompilerModuleToOutputDirMap.get(pair);
compileContext.assignModule(outputs.getFirst(), pair.getSecond(), false);
compileContext.assignModule(outputs.getSecond(), pair.getSecond(), true);
}
final Ref<ExitStatus> status = new Ref<ExitStatus>();
task.start(new Runnable() {
public void run() {
status.set(doCompile(compileContext, false, false, false, getAllOutputDirectories(), true));
}
});
if (LOG.isDebugEnabled()) {
LOG.debug("isUpToDate operation finished");
}
return ExitStatus.UP_TO_DATE.equals(status.get());
}
public void compile(CompileScope scope, CompileStatusNotification callback, boolean trackDependencies) {
if (trackDependencies) {
scope = new TrackDependenciesScope(scope);
}
if (validateCompilerConfiguration(scope, false)) {
startup(scope, false, true, callback, null, true, trackDependencies);
}
}
private static class CompileStatus {
final int CACHE_FORMAT_VERSION;
final boolean COMPILATION_IN_PROGRESS;
public CompileStatus(int cacheVersion, boolean isCompilationInProgress) {
CACHE_FORMAT_VERSION = cacheVersion;
COMPILATION_IN_PROGRESS = isCompilationInProgress;
}
}
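  // On-disk status bookkeeping used by readStatus()/writeStatus() below: VERSION_FILE_NAME
  // holds a single int (the cache format version) written via DataOutputStream, and the
  // existence of LOCK_FILE_NAME marks a compilation as in progress; a leftover lock file
  // therefore indicates a run that crashed, was interrupted, or ended with exceptions.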
private CompileStatus readStatus() {
final boolean isInProgress = new File(myCachesDirectoryPath, LOCK_FILE_NAME).exists();
int version = -1;
try {
final File versionFile = new File(myCachesDirectoryPath, VERSION_FILE_NAME);
DataInputStream in = new DataInputStream(new FileInputStream(versionFile));
try {
version = in.readInt();
}
finally {
in.close();
}
}
catch (FileNotFoundException e) {
// ignore
}
catch (IOException e) {
      LOG.info(e); // may happen if IDEA crashed and the file was not written properly
return null;
}
return new CompileStatus(version, isInProgress);
}
private void writeStatus(CompileStatus status, CompileContext context) {
final File statusFile = new File(myCachesDirectoryPath, VERSION_FILE_NAME);
final File lockFile = new File(myCachesDirectoryPath, LOCK_FILE_NAME);
try {
statusFile.createNewFile();
DataOutputStream out = new DataOutputStream(new FileOutputStream(statusFile));
try {
out.writeInt(status.CACHE_FORMAT_VERSION);
}
finally {
out.close();
}
if (status.COMPILATION_IN_PROGRESS) {
lockFile.createNewFile();
}
else {
lockFile.delete();
}
}
catch (IOException e) {
context.addMessage(CompilerMessageCategory.ERROR, CompilerBundle.message("compiler.error.exception", e.getMessage()), null, -1, -1);
}
}
private void doRebuild(CompileStatusNotification callback,
CompilerMessage message,
final boolean checkCachesVersion,
final CompileScope compileScope) {
if (validateCompilerConfiguration(compileScope, true)) {
startup(compileScope, true, false, callback, message, checkCachesVersion, false);
}
}
private CompileScope addAdditionalRoots(CompileScope originalScope) {
CompileScope scope = originalScope;
for (final Pair<GeneratingCompiler, Module> pair : myGenerationCompilerModuleToOutputDirMap.keySet()) {
final Pair<VirtualFile, VirtualFile> outputs = myGenerationCompilerModuleToOutputDirMap.get(pair);
scope = new CompositeScope(scope, new FileSetCompileScope(new VirtualFile[]{outputs.getFirst(), outputs.getSecond()}, new Module[]{pair.getSecond()}));
}
final AdditionalCompileScopeProvider[] scopeProviders = Extensions.getExtensions(AdditionalCompileScopeProvider.EXTENSION_POINT_NAME);
CompileScope baseScope = scope;
for (AdditionalCompileScopeProvider scopeProvider : scopeProviders) {
final CompileScope additionalScope = scopeProvider.getAdditionalScope(baseScope);
if (additionalScope != null) {
scope = new CompositeScope(scope, additionalScope);
}
}
return scope;
}
public static final Key<Long> COMPILATION_START_TIMESTAMP = Key.create("COMPILATION_START_TIMESTAMP");
private void startup(final CompileScope scope,
final boolean isRebuild,
final boolean forceCompile,
final CompileStatusNotification callback,
final CompilerMessage message,
final boolean checkCachesVersion,
final boolean trackDependencies) {
final CompilerTask compileTask = new CompilerTask(myProject, CompilerWorkspaceConfiguration.getInstance(myProject).COMPILE_IN_BACKGROUND,
forceCompile
? CompilerBundle.message("compiler.content.name.compile")
: CompilerBundle.message("compiler.content.name.make"), false);
final WindowManager windowManager = WindowManager.getInstance();
if (windowManager != null) {
windowManager.getStatusBar(myProject).setInfo("");
}
final DependencyCache dependencyCache = new DependencyCache(myCachesDirectoryPath);
final CompileContextImpl compileContext =
new CompileContextImpl(myProject, compileTask, scope, dependencyCache, this, !isRebuild && !forceCompile);
compileContext.putUserData(COMPILATION_START_TIMESTAMP, LocalTimeCounter.currentTime());
for (Pair<GeneratingCompiler, Module> pair : myGenerationCompilerModuleToOutputDirMap.keySet()) {
final Pair<VirtualFile, VirtualFile> outputs = myGenerationCompilerModuleToOutputDirMap.get(pair);
compileContext.assignModule(outputs.getFirst(), pair.getSecond(), false);
compileContext.assignModule(outputs.getSecond(), pair.getSecond(), true);
}
PsiDocumentManager.getInstance(myProject).commitAllDocuments();
FileDocumentManager.getInstance().saveAllDocuments();
compileTask.start(new Runnable() {
public void run() {
try {
if (myProject.isDisposed()) {
return;
}
if (LOG.isDebugEnabled()) {
LOG.debug("COMPILATION STARTED");
}
if (message != null) {
compileContext.addMessage(message);
}
doCompile(compileContext, isRebuild, forceCompile, callback, checkCachesVersion, trackDependencies);
}
finally {
if (LOG.isDebugEnabled()) {
LOG.debug("COMPILATION FINISHED");
}
}
}
});
}
private void doCompile(final CompileContextImpl compileContext,
final boolean isRebuild,
final boolean forceCompile,
final CompileStatusNotification callback,
final boolean checkCachesVersion,
final boolean trackDependencies) {
ExitStatus status = ExitStatus.ERRORS;
boolean wereExceptions = false;
try {
compileContext.getProgressIndicator().pushState();
if (checkCachesVersion) {
checkCachesVersion(compileContext);
if (compileContext.isRebuildRequested()) {
return;
}
}
writeStatus(new CompileStatus(CompilerConfigurationImpl.DEPENDENCY_FORMAT_VERSION, true), compileContext);
if (compileContext.getMessageCount(CompilerMessageCategory.ERROR) > 0) {
return;
}
status = doCompile(compileContext, isRebuild, forceCompile, trackDependencies, getAllOutputDirectories(), false);
}
catch (Throwable ex) {
wereExceptions = true;
throw new RuntimeException(ex);
}
finally {
dropDependencyCache(compileContext);
compileContext.getProgressIndicator().popState();
final ExitStatus _status = status;
if (compileContext.isRebuildRequested()) {
ApplicationManager.getApplication().invokeLater(new Runnable() {
public void run() {
doRebuild(callback, new CompilerMessageImpl(myProject, CompilerMessageCategory.INFORMATION, compileContext.getRebuildReason(),
null, -1, -1, null), false, compileContext.getCompileScope());
}
}, ModalityState.NON_MODAL);
}
else {
writeStatus(new CompileStatus(CompilerConfigurationImpl.DEPENDENCY_FORMAT_VERSION, wereExceptions), compileContext);
ApplicationManager.getApplication().invokeLater(new Runnable() {
public void run() {
final int errorCount = compileContext.getMessageCount(CompilerMessageCategory.ERROR);
final int warningCount = compileContext.getMessageCount(CompilerMessageCategory.WARNING);
final String statusMessage = createStatusMessage(_status, warningCount, errorCount);
final StatusBar statusBar = WindowManager.getInstance().getStatusBar(myProject);
          if (statusBar != null) { // because this code runs in invokeLater, it may execute for an already closed project
            // if another project was opened in the same frame while the compiler was working (see SCR# 28591)
statusBar.setInfo(statusMessage);
}
if (_status != ExitStatus.UP_TO_DATE && compileContext.getMessageCount(null) > 0) {
compileContext.addMessage(CompilerMessageCategory.INFORMATION, statusMessage, null, -1, -1);
}
if (callback != null) {
callback.finished(_status == ExitStatus.CANCELLED, errorCount, warningCount, compileContext);
}
ProfilingUtil.operationFinished("make");
}
}, ModalityState.NON_MODAL);
}
}
}
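  /**
   * Requests a full rebuild on the next run if the compiler caches are missing or corrupted, were written
   * in an incompatible format, or a previous compilation was interrupted while still in progress.
   */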
private void checkCachesVersion(final CompileContextImpl compileContext) {
final CompileStatus compileStatus = readStatus();
if (compileStatus == null) {
compileContext.requestRebuildNextTime(CompilerBundle.message("error.compiler.caches.corrupted"));
}
else if (compileStatus.CACHE_FORMAT_VERSION != -1 &&
compileStatus.CACHE_FORMAT_VERSION != CompilerConfigurationImpl.DEPENDENCY_FORMAT_VERSION) {
compileContext.requestRebuildNextTime(CompilerBundle.message("error.caches.old.format"));
}
else if (compileStatus.COMPILATION_IN_PROGRESS) {
compileContext.requestRebuildNextTime(CompilerBundle.message("error.previous.compilation.failed"));
}
}
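  /**
   * Builds the human-readable summary shown in the status bar and added to the message view after a run.
   */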
private static String createStatusMessage(final ExitStatus status, final int warningCount, final int errorCount) {
if (status == ExitStatus.CANCELLED) {
return CompilerBundle.message("status.compilation.aborted");
}
if (status == ExitStatus.UP_TO_DATE) {
return CompilerBundle.message("status.all.up.to.date");
}
if (status == ExitStatus.SUCCESS) {
return warningCount > 0
? CompilerBundle.message("status.compilation.completed.successfully.with.warnings", warningCount)
: CompilerBundle.message("status.compilation.completed.successfully");
}
return CompilerBundle.message("status.compilation.completed.successfully.with.warnings.and.errors", errorCount, warningCount);
}
private static class ExitStatus {
private String myName;
private ExitStatus(@NonNls String name) {
myName = name;
}
public String toString() {
return myName;
}
public static final ExitStatus CANCELLED = new ExitStatus("CANCELLED");
public static final ExitStatus ERRORS = new ExitStatus("ERRORS");
public static final ExitStatus SUCCESS = new ExitStatus("SUCCESS");
public static final ExitStatus UP_TO_DATE = new ExitStatus("UP_TO_DATE");
}
private static class ExitException extends Exception {
private final ExitStatus myStatus;
public ExitException(ExitStatus status) {
myStatus = status;
}
public ExitStatus getExitStatus() {
return myStatus;
}
}
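  /**
   * Runs the actual compilation pipeline: optional full cleanup for rebuilds, "before" compile tasks,
   * source generators, source instrumenters, translating compilers, class instrumenters and post-processors,
   * packaging compilers, validators, and finally the "after" compile tasks. Returns the aggregated exit status.
   */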
private ExitStatus doCompile(CompileContextImpl context,
boolean isRebuild,
final boolean forceCompile,
final boolean trackDependencies,
final Set<File> outputDirectories,
final boolean onlyCheckStatus) {
try {
if (isRebuild) {
deleteAll(context, outputDirectories);
if (context.getMessageCount(CompilerMessageCategory.ERROR) > 0) {
if (LOG.isDebugEnabled()) {
logErrorMessages(context);
}
return ExitStatus.ERRORS;
}
}
if (!onlyCheckStatus) {
try {
context.getProgressIndicator().pushState();
if (!executeCompileTasks(context, true)) {
if (LOG.isDebugEnabled()) {
LOG.debug("Compilation cancelled");
}
return ExitStatus.CANCELLED;
}
}
finally {
context.getProgressIndicator().popState();
}
}
if (context.getMessageCount(CompilerMessageCategory.ERROR) > 0) {
if (LOG.isDebugEnabled()) {
logErrorMessages(context);
}
return ExitStatus.ERRORS;
}
// need this to make sure the VFS is built
final List<VirtualFile> outputsToRefresh = new ArrayList<VirtualFile>();
for (VirtualFile output : context.getAllOutputDirectories()) {
walkChildren(output);
outputsToRefresh.add(output);
}
for (Pair<GeneratingCompiler, Module> pair : myGenerationCompilerModuleToOutputDirMap.keySet()) {
final Pair<VirtualFile, VirtualFile> generated = myGenerationCompilerModuleToOutputDirMap.get(pair);
walkChildren(generated.getFirst());
outputsToRefresh.add(generated.getFirst());
walkChildren(generated.getSecond());
outputsToRefresh.add(generated.getSecond());
}
RefreshQueue.getInstance().refresh(false, true, null, outputsToRefresh.toArray(new VirtualFile[outputsToRefresh.size()]));
boolean didSomething = false;
final CompilerManager compilerManager = CompilerManager.getInstance(myProject);
try {
didSomething |= generateSources(compilerManager, context, forceCompile, onlyCheckStatus);
didSomething |= invokeFileProcessingCompilers(compilerManager, context, SourceInstrumentingCompiler.class,
myProcessingCompilerAdapterFactory, forceCompile, true, onlyCheckStatus);
didSomething |= translate(context, compilerManager, forceCompile, isRebuild, trackDependencies, outputDirectories, onlyCheckStatus);
didSomething |= invokeFileProcessingCompilers(compilerManager, context, ClassInstrumentingCompiler.class,
myFixedTimestampCompilerAdapterFactory, isRebuild, false, onlyCheckStatus);
        // explicitly passing forceCompile = false because in scopes narrower than ProjectScope it is impossible
        // to tell whether the class to be processed is in scope or not. Otherwise the compiler may process its items even if
        // the only changes were in completely independent files.
didSomething |= invokeFileProcessingCompilers(compilerManager, context, ClassPostProcessingCompiler.class,
myFixedTimestampCompilerAdapterFactory, isRebuild, false, onlyCheckStatus);
didSomething |= invokeFileProcessingCompilers(compilerManager, context, PackagingCompiler.class, myPackagingCompilerAdapterFactory,
isRebuild, true, onlyCheckStatus);
didSomething |= invokeFileProcessingCompilers(compilerManager, context, Validator.class, myProcessingCompilerAdapterFactory,
forceCompile, true, onlyCheckStatus);
}
catch (ExitException e) {
if (LOG.isDebugEnabled()) {
LOG.debug(e);
logErrorMessages(context);
}
return e.getExitStatus();
}
finally {
// drop in case it has not been dropped yet.
dropDependencyCache(context);
final VirtualFile[] allOutputDirs = context.getAllOutputDirectories();
if (didSomething && GENERATE_CLASSPATH_INDEX) {
context.getProgressIndicator().pushState();
context.getProgressIndicator().setText("Generating classpath index...");
int count = 0;
for (VirtualFile file : allOutputDirs) {
context.getProgressIndicator().setFraction(((double)++count) / allOutputDirs.length);
createClasspathIndex(file);
}
context.getProgressIndicator().popState();
}
if (!context.getProgressIndicator().isCanceled() && context.getMessageCount(CompilerMessageCategory.ERROR) == 0) {
RefreshQueue.getInstance().refresh(true, true, new Runnable() {
public void run() {
CompilerDirectoryTimestamp.updateTimestamp(Arrays.asList(allOutputDirs));
}
}, allOutputDirs);
}
}
if (!onlyCheckStatus) {
try {
context.getProgressIndicator().pushState();
if (!executeCompileTasks(context, false)) {
return ExitStatus.CANCELLED;
}
}
finally {
context.getProgressIndicator().popState();
}
}
if (context.getMessageCount(CompilerMessageCategory.ERROR) > 0) {
if (LOG.isDebugEnabled()) {
logErrorMessages(context);
}
return ExitStatus.ERRORS;
}
if (!didSomething) {
return ExitStatus.UP_TO_DATE;
}
return ExitStatus.SUCCESS;
}
catch (ProcessCanceledException e) {
return ExitStatus.CANCELLED;
}
}
private static void logErrorMessages(final CompileContextImpl context) {
final CompilerMessage[] errors = context.getMessages(CompilerMessageCategory.ERROR);
if (errors.length > 0) {
LOG.debug("There were errors while deleting output directories");
for (CompilerMessage error : errors) {
LOG.debug("\t" + error.getMessage());
}
}
}
private static void walkChildren(VirtualFile from) {
final VirtualFile[] files = from.getChildren();
if (files != null && files.length > 0) {
for (VirtualFile file : files) {
walkChildren(file);
}
}
}
private static void createClasspathIndex(final VirtualFile file) {
try {
BufferedWriter writer = new BufferedWriter(new FileWriter(new File(VfsUtil.virtualToIoFile(file), "classpath.index")));
try {
writeIndex(writer, file, file);
}
finally {
writer.close();
}
}
catch (IOException e) {
// Ignore. Failed to create optional classpath index
}
}
private static void writeIndex(final BufferedWriter writer, final VirtualFile root, final VirtualFile file) throws IOException {
writer.write(VfsUtil.getRelativePath(file, root, '/'));
writer.write('\n');
for (VirtualFile child : file.getChildren()) {
writeIndex(writer, root, child);
}
}
private static void dropDependencyCache(final CompileContextImpl context) {
context.getProgressIndicator().pushState();
try {
context.getProgressIndicator().setText(CompilerBundle.message("progress.saving.caches"));
context.getDependencyCache().dispose();
}
finally {
context.getProgressIndicator().popState();
}
}
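  /**
   * Invokes every registered SourceGeneratingCompiler and reports whether any of them produced output.
   */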
private boolean generateSources(final CompilerManager compilerManager,
CompileContextImpl context,
final boolean forceCompile,
final boolean onlyCheckStatus) throws ExitException {
boolean didSomething = false;
final SourceGeneratingCompiler[] sourceGenerators = compilerManager.getCompilers(SourceGeneratingCompiler.class);
for (final SourceGeneratingCompiler sourceGenerator : sourceGenerators) {
if (context.getProgressIndicator().isCanceled()) {
throw new ExitException(ExitStatus.CANCELLED);
}
final boolean generatedSomething = generateOutput(context, sourceGenerator, forceCompile, onlyCheckStatus);
dropInternalCache(sourceGenerator);
if (context.getMessageCount(CompilerMessageCategory.ERROR) > 0) {
throw new ExitException(ExitStatus.ERRORS);
}
didSomething |= generatedSomething;
}
return didSomething;
}
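  /**
   * Runs all TranslatingCompilers over a VFS snapshot of the compile scope, dropping the dependency cache
   * and each compiler's internal cache between runs to reduce memory pressure.
   */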
private boolean translate(final CompileContextImpl context,
final CompilerManager compilerManager,
final boolean forceCompile,
boolean isRebuild,
final boolean trackDependencies,
final Set<File> outputDirectories,
final boolean onlyCheckStatus) throws ExitException {
boolean didSomething = false;
final TranslatingCompiler[] translators = compilerManager.getCompilers(TranslatingCompiler.class);
final VfsSnapshot snapshot = ApplicationManager.getApplication().runReadAction(new Computable<VfsSnapshot>() {
public VfsSnapshot compute() {
return new VfsSnapshot(context.getCompileScope().getFiles(null, true));
}
});
for (final TranslatingCompiler translator : translators) {
if (context.getProgressIndicator().isCanceled()) {
throw new ExitException(ExitStatus.CANCELLED);
}
final boolean compiledSomething =
compileSources(context, snapshot, translator, forceCompile, isRebuild, trackDependencies, outputDirectories, onlyCheckStatus);
      // free memory early to leave more space for other compilers
dropDependencyCache(context);
dropInternalCache(translator);
if (context.getMessageCount(CompilerMessageCategory.ERROR) > 0) {
throw new ExitException(ExitStatus.ERRORS);
}
didSomething |= compiledSomething;
}
return didSomething;
}
  private interface FileProcessingCompilerAdapterFactory {
FileProcessingCompilerAdapter create(CompileContext context, FileProcessingCompiler compiler);
}
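  /**
   * Generic driver for FileProcessingCompiler subclasses (instrumenters, packagers, validators): creates an
   * adapter for each compiler via the supplied factory and processes its items, aborting with the
   * corresponding exit status on cancellation or errors.
   */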
private boolean invokeFileProcessingCompilers(final CompilerManager compilerManager,
CompileContextImpl context,
Class<? extends FileProcessingCompiler> fileProcessingCompilerClass,
FileProcessingCompilerAdapterFactory factory,
boolean forceCompile,
final boolean checkScope,
final boolean onlyCheckStatus) throws ExitException {
LOG.assertTrue(FileProcessingCompiler.class.isAssignableFrom(fileProcessingCompilerClass));
boolean didSomething = false;
final FileProcessingCompiler[] compilers = compilerManager.getCompilers(fileProcessingCompilerClass);
if (compilers.length > 0) {
try {
for (final FileProcessingCompiler compiler : compilers) {
if (context.getProgressIndicator().isCanceled()) {
throw new ExitException(ExitStatus.CANCELLED);
}
final boolean processedSomething = processFiles(factory.create(context, compiler), forceCompile, checkScope, onlyCheckStatus);
dropInternalCache(compiler);
if (context.getMessageCount(CompilerMessageCategory.ERROR) > 0) {
throw new ExitException(ExitStatus.ERRORS);
}
didSomething |= processedSomething;
}
}
catch (ProcessCanceledException e) {
throw e;
}
catch (ExitException e) {
throw e;
}
catch (Exception e) {
context.addMessage(CompilerMessageCategory.ERROR, CompilerBundle.message("compiler.error.exception", e.getMessage()), null, -1, -1);
LOG.error(e);
}
}
return didSomething;
}
private static Map<Module, Set<GeneratingCompiler.GenerationItem>> buildModuleToGenerationItemMap(GeneratingCompiler.GenerationItem[] items) {
final Map<Module, Set<GeneratingCompiler.GenerationItem>> map = new HashMap<Module, Set<GeneratingCompiler.GenerationItem>>();
for (GeneratingCompiler.GenerationItem item : items) {
Module module = item.getModule();
LOG.assertTrue(module != null);
Set<GeneratingCompiler.GenerationItem> itemSet = map.get(module);
if (itemSet == null) {
itemSet = new HashSet<GeneratingCompiler.GenerationItem>();
map.put(module, itemSet);
}
itemSet.add(item);
}
return map;
}
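  /**
   * Full-rebuild cleanup: wipes the per-compiler state caches, deletes previously produced output files
   * (or clears the output directories entirely, depending on the settings), and clears the compiler
   * system directory.
   */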
private void deleteAll(final CompileContext context, Set<File> outputDirectories) {
context.getProgressIndicator().pushState();
try {
final boolean isTestMode = ApplicationManager.getApplication().isUnitTestMode();
final Compiler[] allCompilers = CompilerManager.getInstance(myProject).getCompilers(Compiler.class);
context.getProgressIndicator().setText(CompilerBundle.message("progress.clearing.output"));
for (final Compiler compiler : allCompilers) {
if (compiler instanceof GeneratingCompiler) {
final StateCache<ValidityState> cache = getGeneratingCompilerCache((GeneratingCompiler)compiler);
if (!myShouldClearOutputDirectory) {
final Iterator<String> urlIterator = cache.getUrlsIterator();
while (urlIterator.hasNext()) {
new File(VirtualFileManager.extractPath(urlIterator.next())).delete();
}
}
cache.wipe();
}
else if (compiler instanceof FileProcessingCompiler) {
final FileProcessingCompilerStateCache cache = getFileProcessingCompilerCache((FileProcessingCompiler)compiler);
cache.wipe();
}
else if (compiler instanceof TranslatingCompiler) {
final TranslatingCompilerStateCache cache = getTranslatingCompilerCache((TranslatingCompiler)compiler);
if (!myShouldClearOutputDirectory) {
final Iterator<String> urlIterator = cache.getOutputUrlsIterator();
while (urlIterator.hasNext()) {
final String outputPath = urlIterator.next();
final String sourceUrl = cache.getSourceUrl(outputPath);
if (sourceUrl == null || !FileUtil.pathsEqual(outputPath, VirtualFileManager.extractPath(sourceUrl))) {
new File(outputPath).delete();
if (isTestMode) {
CompilerManagerImpl.addDeletedPath(outputPath);
}
}
}
}
cache.wipe();
}
}
if (myShouldClearOutputDirectory) {
clearOutputDirectories(outputDirectories);
}
else { // refresh is still required
        pruneEmptyDirectories(outputDirectories); // to avoid flooding the VFS with 'file deleted' events
for (final VirtualFile outputDirectory : CompilerPathsEx.getOutputDirectories(ModuleManager.getInstance(myProject).getModules())) {
outputDirectory.refresh(false, true);
}
}
dropScopesCaches();
clearCompilerSystemDirectory(context);
}
finally {
context.getProgressIndicator().popState();
}
}
private void dropScopesCaches() {
// hack to be sure the classpath will include the output directories
ApplicationManager.getApplication().runReadAction(new Runnable() {
public void run() {
((ProjectRootManagerEx)ProjectRootManager.getInstance(myProject)).clearScopesCachesForModules();
}
});
}
private static void pruneEmptyDirectories(final Set<File> directories) {
for (File directory : directories) {
doPrune(directory, directories);
}
}
  private static boolean doPrune(final File directory, final Set<File> outputDirectories) {
    final File[] files = directory.listFiles();
    boolean isEmpty = true;
    if (files != null) {
      for (File file : files) {
        if (file.isDirectory() && !outputDirectories.contains(file)) {
          if (doPrune(file, outputDirectories)) {
file.delete();
}
else {
isEmpty = false;
}
}
else {
isEmpty = false;
}
}
}
return isEmpty;
}
private Set<File> getAllOutputDirectories() {
final Set<File> outputDirs = new THashSet<File>();
ApplicationManager.getApplication().runReadAction(new Runnable() {
public void run() {
final VirtualFile[] outputDirectories = CompilerPathsEx.getOutputDirectories(ModuleManager.getInstance(myProject).getModules());
for (final VirtualFile outputDirectory : outputDirectories) {
final File directory = VfsUtil.virtualToIoFile(outputDirectory);
outputDirs.add(directory);
}
}
});
return outputDirs;
}
private void clearOutputDirectories(final Set<File> _outputDirectories) {
    // do not delete the directories themselves, otherwise we'll get rootsChanged() events
final List<File> outputDirectories = new ArrayList<File>(_outputDirectories);
for (Pair<GeneratingCompiler, Module> pair : myGenerationCompilerModuleToOutputDirMap.keySet()) {
outputDirectories.add(new File(getGenerationOutputPath(pair.getFirst(), pair.getSecond(), false)));
outputDirectories.add(new File(getGenerationOutputPath(pair.getFirst(), pair.getSecond(), true)));
}
Collection<File> filesToDelete = new ArrayList<File>(outputDirectories.size() * 2);
for (File outputDirectory : outputDirectories) {
File[] files = outputDirectory.listFiles();
if (files != null) {
filesToDelete.addAll(Arrays.asList(files));
}
}
FileUtil.asyncDelete(filesToDelete);
    // ensure output directories exist; create and refresh them if they do not
final List<File> createdFiles = new ArrayList<File>(outputDirectories.size());
for (final File file : outputDirectories) {
if (file.mkdirs()) {
createdFiles.add(file);
}
}
CompilerUtil.refreshIOFiles(createdFiles);
}
private void clearCompilerSystemDirectory(final CompileContext context) {
final File[] children = new File(myCachesDirectoryPath).listFiles();
if (children != null) {
for (final File child : children) {
final boolean deleteOk = FileUtil.delete(child);
if (!deleteOk) {
context.addMessage(CompilerMessageCategory.ERROR, CompilerBundle.message("compiler.error.failed.to.delete", child.getPath()),
null, -1, -1);
}
}
}
for (Pair<GeneratingCompiler, Module> pair : myGenerationCompilerModuleToOutputDirMap.keySet()) {
final File[] outputs = {
new File(getGenerationOutputPath(pair.getFirst(), pair.getSecond(), false)),
new File(getGenerationOutputPath(pair.getFirst(), pair.getSecond(), true))
};
for (File output : outputs) {
final File[] files = output.listFiles();
if (files != null) {
for (final File file : files) {
final boolean deleteOk = FileUtil.delete(file);
if (!deleteOk) {
context.addMessage(CompilerMessageCategory.ERROR, CompilerBundle.message("compiler.error.failed.to.delete", file.getPath()),
null, -1, -1);
}
}
}
}
}
}
private VirtualFile getGenerationOutputDir(final GeneratingCompiler compiler, final Module module, final boolean forTestSources) {
final Pair<VirtualFile, VirtualFile> outputs =
myGenerationCompilerModuleToOutputDirMap.get(new Pair<GeneratingCompiler, Module>(compiler, module));
return forTestSources? outputs.getSecond() : outputs.getFirst();
}
private static @NonNls String getGenerationOutputPath(GeneratingCompiler compiler, Module module, final boolean forTestSources) {
final String generatedCompilerDirectoryPath = CompilerPaths.getGeneratedDataDirectory(module.getProject(), compiler).getPath();
final String moduleDir = module.getName().replace(' ', '_') + "." + Integer.toHexString(module.getModuleFilePath().hashCode());
return generatedCompilerDirectoryPath.replace(File.separatorChar, '/') + "/" + moduleDir + "/" + (forTestSources? "test" : "production");
}
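  /**
   * Runs a single GeneratingCompiler: determines which generation items are out of date, removes stale
   * outputs, generates new files module by module (in dependency order) and updates the compiler's cache.
   */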
private boolean generateOutput(final CompileContextImpl context,
final GeneratingCompiler compiler,
final boolean forceGenerate,
final boolean onlyCheckStatus) throws ExitException {
final GeneratingCompiler.GenerationItem[] allItems = compiler.getGenerationItems(context);
final List<GeneratingCompiler.GenerationItem> toGenerate = new ArrayList<GeneratingCompiler.GenerationItem>();
final StateCache<ValidityState> cache = getGeneratingCompilerCache(compiler);
final Set<String> pathsToRemove = new HashSet<String>(Arrays.asList(cache.getUrls()));
final Map<GeneratingCompiler.GenerationItem, String> itemToOutputPathMap = new HashMap<GeneratingCompiler.GenerationItem, String>();
ApplicationManager.getApplication().runReadAction(new Runnable() {
public void run() {
for (final GeneratingCompiler.GenerationItem item : allItems) {
final Module itemModule = item.getModule();
final String outputDirPath = getGenerationOutputPath(compiler, itemModule, item.isTestSource());
final String outputPath = outputDirPath + "/" + item.getPath();
itemToOutputPathMap.put(item, outputPath);
final ValidityState savedState = cache.getState(outputPath);
if (forceGenerate || savedState == null || !savedState.equalsTo(item.getValidityState())) {
if (context.getCompileScope().belongs(outputPath)) {
toGenerate.add(item);
}
else {
pathsToRemove.remove(outputPath);
}
}
else {
pathsToRemove.remove(outputPath);
}
}
}
});
if (onlyCheckStatus) {
      if (toGenerate.isEmpty() && pathsToRemove.isEmpty()) {
return false;
}
if (LOG.isDebugEnabled()) {
if (!toGenerate.isEmpty()) {
LOG.debug("Found items to generate, compiler " + compiler.getDescription());
}
if (!pathsToRemove.isEmpty()) {
LOG.debug("Found paths to remove, compiler " + compiler.getDescription());
}
}
throw new ExitException(ExitStatus.CANCELLED);
}
final List<File> filesToRefresh = new ArrayList<File>();
final List<File> generatedFiles = new ArrayList<File>();
final List<Module> affectedModules = new ArrayList<Module>();
try {
if (!pathsToRemove.isEmpty()) {
context.getProgressIndicator().pushState();
context.getProgressIndicator().setText(CompilerBundle.message("progress.synchronizing.output.directory"));
for (final String path : pathsToRemove) {
final File file = new File(path);
final boolean deleted = file.delete();
if (deleted) {
cache.remove(path);
filesToRefresh.add(file);
}
}
context.getProgressIndicator().popState();
}
Map<Module, Set<GeneratingCompiler.GenerationItem>> moduleToItemMap =
buildModuleToGenerationItemMap(toGenerate.toArray(new GeneratingCompiler.GenerationItem[toGenerate.size()]));
List<Module> modules = new ArrayList<Module>(moduleToItemMap.size());
for (final Module module : moduleToItemMap.keySet()) {
modules.add(module);
}
ModuleCompilerUtil.sortModules(myProject, modules);
for (final Module module : modules) {
context.getProgressIndicator().pushState();
try {
final Set<GeneratingCompiler.GenerationItem> items = moduleToItemMap.get(module);
if (items != null && !items.isEmpty()) {
final GeneratingCompiler.GenerationItem[][] productionAndTestItems = splitGenerationItems(items);
boolean moduleAffected = false;
for (GeneratingCompiler.GenerationItem[] _items : productionAndTestItems) {
if (_items.length > 0) {
final VirtualFile outputDir = getGenerationOutputDir(compiler, module, _items[0].isTestSource());
final GeneratingCompiler.GenerationItem[] successfullyGenerated = compiler.generate(context, _items, outputDir);
context.getProgressIndicator().setText(CompilerBundle.message("progress.updating.caches"));
if (successfullyGenerated.length > 0) {
moduleAffected = true;
}
for (final GeneratingCompiler.GenerationItem item : successfullyGenerated) {
final String fullOutputPath = itemToOutputPathMap.get(item);
cache.update(fullOutputPath, item.getValidityState());
final File file = new File(fullOutputPath);
filesToRefresh.add(file);
generatedFiles.add(file);
}
}
}
if (moduleAffected) {
affectedModules.add(module);
}
}
}
finally {
context.getProgressIndicator().popState();
}
}
}
finally {
context.getProgressIndicator().pushState();
CompilerUtil.refreshIOFiles(filesToRefresh);
if (forceGenerate && !generatedFiles.isEmpty()) {
ApplicationManager.getApplication().runReadAction(new Runnable() {
public void run() {
List<VirtualFile> vFiles = new ArrayList<VirtualFile>(generatedFiles.size());
for (File generatedFile : generatedFiles) {
final VirtualFile vFile = LocalFileSystem.getInstance().findFileByIoFile(generatedFile);
if (vFile != null) {
vFiles.add(vFile);
}
}
final FileSetCompileScope additionalScope = new FileSetCompileScope(vFiles.toArray(new VirtualFile[vFiles.size()]),
affectedModules.toArray(
new Module[affectedModules.size()]));
context.addScope(additionalScope);
}
});
}
if (cache.isDirty()) {
context.getProgressIndicator().setText(CompilerBundle.message("progress.saving.caches"));
cache.save();
}
context.getProgressIndicator().popState();
}
return !toGenerate.isEmpty() || !filesToRefresh.isEmpty();
}
private static GeneratingCompiler.GenerationItem[][] splitGenerationItems(final Set<GeneratingCompiler.GenerationItem> items) {
final List<GeneratingCompiler.GenerationItem> production = new ArrayList<GeneratingCompiler.GenerationItem>();
final List<GeneratingCompiler.GenerationItem> tests = new ArrayList<GeneratingCompiler.GenerationItem>();
for (GeneratingCompiler.GenerationItem item : items) {
if (item.isTestSource()) {
tests.add(item);
}
else {
production.add(item);
}
}
return new GeneratingCompiler.GenerationItem[][]{
production.toArray(new GeneratingCompiler.GenerationItem[production.size()]),
tests.toArray(new GeneratingCompiler.GenerationItem[tests.size()])
};
}
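  /**
   * Runs a single TranslatingCompiler: collects out-of-date source files (optionally following their
   * dependencies), deletes outputs whose sources are gone or relocated, invokes the compiler and records
   * the results in its state cache.
   */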
private boolean compileSources(final CompileContextImpl context,
final VfsSnapshot snapshot,
final TranslatingCompiler compiler,
final boolean forceCompile,
final boolean isRebuild,
final boolean trackDependencies,
final Set<File> outputDirectories,
final boolean onlyCheckStatus) throws ExitException {
final TranslatingCompilerStateCache cache = getTranslatingCompilerCache(compiler);
final CompilerConfiguration compilerConfiguration = CompilerConfiguration.getInstance(myProject);
context.getProgressIndicator().pushState();
final boolean[] wereFilesDeleted = new boolean[]{false};
final Set<VirtualFile> toCompile = new HashSet<VirtualFile>();
try {
final Set<String> toDelete = new HashSet<String>();
final Set<String> urlsWithSourceRemoved = new HashSet<String>();
ApplicationManager.getApplication().runReadAction(new Runnable() {
public void run() {
findOutOfDateFiles(compiler, forceCompile, cache, toCompile, context);
if (trackDependencies && !toCompile.isEmpty()) { // should add dependent files
final FileTypeManager fileTypeManager = FileTypeManager.getInstance();
final PsiManager psiManager = PsiManager.getInstance(myProject);
final VirtualFile[] filesToCompile = toCompile.toArray(new VirtualFile[toCompile.size()]);
Set<String> sourcesWithOutputRemoved = getSourcesWithOutputRemoved(cache);
for (final VirtualFile file : filesToCompile) {
if (fileTypeManager.getFileTypeByFile(file) == StdFileTypes.JAVA) {
final PsiFile psiFile = psiManager.findFile(file);
if (psiFile != null) {
addDependentFiles(psiFile, toCompile, cache, sourcesWithOutputRemoved, compiler, context);
}
}
}
}
if (!isRebuild) {
final ProgressIndicator progressIndicator = context.getProgressIndicator();
progressIndicator.pushState();
progressIndicator.setText(CompilerBundle.message("progress.searching.for.files.to.delete"));
findFilesToDelete(snapshot, urlsWithSourceRemoved, cache, toCompile, context, toDelete, compilerConfiguration);
progressIndicator.popState();
}
}
});
if (onlyCheckStatus) {
if (toDelete.isEmpty() && toCompile.isEmpty()) {
return false;
}
if (LOG.isDebugEnabled()) {
if (!toDelete.isEmpty()) {
LOG.debug("Found items to delete, compiler " + compiler.getDescription());
}
if (!toCompile.isEmpty()) {
LOG.debug("Found items to compile, compiler " + compiler.getDescription());
}
}
throw new ExitException(ExitStatus.CANCELLED);
}
if (!toDelete.isEmpty()) {
try {
wereFilesDeleted[0] = syncOutputDir(urlsWithSourceRemoved, context, toDelete, cache, outputDirectories);
}
catch (CacheCorruptedException e) {
LOG.info(e);
context.requestRebuildNextTime(e.getMessage());
}
}
if (wereFilesDeleted[0] && !toDelete.isEmpty()) {
CompilerUtil.refreshPaths(toDelete.toArray(new String[toDelete.size()]));
}
if ((wereFilesDeleted[0] || !toCompile.isEmpty()) && context.getMessageCount(CompilerMessageCategory.ERROR) == 0) {
final TranslatingCompiler.ExitStatus exitStatus = compiler.compile(context, toCompile.toArray(new VirtualFile[toCompile.size()]));
updateInternalCaches(cache, context, exitStatus.getSuccessfullyCompiled(), exitStatus.getFilesToRecompile());
}
}
finally {
      if (cache.isDirty()) {
        context.getProgressIndicator().setText(CompilerBundle.message("progress.saving.caches"));
        if (LOG.isDebugEnabled()) {
          LOG.debug("--Saving translating cache for compiler " + compiler.getDescription());
        }
        cache.save();
        if (LOG.isDebugEnabled()) {
          LOG.debug("--Done");
        }
      }
context.getProgressIndicator().popState();
}
return !toCompile.isEmpty() || wereFilesDeleted[0];
}
private Set<String> getSourcesWithOutputRemoved(TranslatingCompilerStateCache cache) {
final Set<String> set = new HashSet<String>();
final LocalFileSystem lfs = LocalFileSystem.getInstance();
for (Iterator<String> it = cache.getOutputUrlsIterator(); it.hasNext();) {
String outputPath = it.next();
if (lfs.findFileByPath(outputPath) == null/*!myOutputFilesOnDisk.contains(outputPath)*/) {
set.add(cache.getSourceUrl(outputPath));
}
}
return set;
}
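  /**
   * Scans the compiler's output cache and decides which previously produced files must be deleted
   * (source removed, output directory changed, or output/source mismatch) and which sources must be
   * recompiled into the new location.
   */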
private void findFilesToDelete(VfsSnapshot snapshot,
final Set<String> urlsWithSourceRemoved,
final TranslatingCompilerStateCache cache,
final Set<VirtualFile> toCompile,
final CompileContextImpl context,
final Set<String> toDelete,
final CompilerConfiguration compilerConfiguration) {
final CompileScope scope = context.getCompileScope();
final LocalFileSystem lfs = LocalFileSystem.getInstance();
final boolean outputDirsUpToDate = CompilerDirectoryTimestamp.isUpToDate(Arrays.asList(context.getAllOutputDirectories()));
for (Iterator<String> it = cache.getOutputUrlsIterator(); it.hasNext();) {
final String outputPath = it.next();
final String sourceUrl = cache.getSourceUrl(outputPath);
if (!scope.belongs(sourceUrl)) {
continue;
}
final VirtualFile sourceFile = snapshot.getFileByUrl(sourceUrl);
boolean needRecompile = false;
boolean shouldDelete;
if (outputDirsUpToDate || lfs.findFileByPath(outputPath) != null/*myOutputFilesOnDisk.contains(outputPath)*/) {
if (sourceFile == null) {
shouldDelete = true/*scope.belongs(sourceUrl)*/;
}
else {
if (toCompile.contains(sourceFile)) {
            // some crazy users store their resources (which are source files for us) directly in the output dir
// we should not delete files which are both output and source files
shouldDelete = !FileUtil.pathsEqual(outputPath, VirtualFileManager.extractPath(sourceUrl));
}
else {
final String currentOutputDir = getModuleOutputDirForFile(context, sourceFile);
if (currentOutputDir != null) {
final String className = cache.getClassName(outputPath);
//noinspection HardCodedStringLiteral
if (className == null || isUnderOutputDir(currentOutputDir, outputPath, className)) {
shouldDelete = false;
}
else {
// output for this source has been changed or the output dir was changed, need to recompile to the new output dir
shouldDelete = true;
needRecompile = true;
}
}
else {
shouldDelete = true;
}
}
}
}
else {
// output for this source has been deleted or the output dir was changed, need to recompile
needRecompile = true;
shouldDelete = true; // in case the output dir was changed, should delete from the previous location
}
if (shouldDelete) {
toDelete.add(outputPath);
}
if (needRecompile) {
if (sourceFile != null /*&& scope.belongs(sourceUrl)*/) {
if (!compilerConfiguration.isExcludedFromCompilation(sourceFile)) {
toCompile.add(sourceFile);
}
}
}
if (sourceFile == null) {
urlsWithSourceRemoved.add(outputPath);
}
}
}
private static boolean isUnderOutputDir(final String outputDir, final String outputPath, final String className) {
final int outputRootLen = outputPath.length() - className.length() - ".class".length() - 1;
return outputDir.length() == outputRootLen &&
outputDir.regionMatches(!SystemInfo.isFileSystemCaseSensitive, 0, outputPath, 0, outputRootLen);
}
private static void updateInternalCaches(final TranslatingCompilerStateCache cache,
final CompileContextImpl context,
final TranslatingCompiler.OutputItem[] successfullyCompiled,
final VirtualFile[] filesToRecompile) {
ApplicationManager.getApplication().runReadAction(new Runnable() {
public void run() {
context.getProgressIndicator().setText(CompilerBundle.message("progress.updating.caches"));
final FileTypeManager typeManager = FileTypeManager.getInstance();
if (LOG.isDebugEnabled()) {
LOG.debug("Updating internal caches: successfully compiled " + successfullyCompiled.length + " files; toRecompile: " +
filesToRecompile.length + " files");
}
for (final TranslatingCompiler.OutputItem item : successfullyCompiled) {
final String outputPath = item.getOutputPath();
final VirtualFile sourceFile = item.getSourceFile();
final String className;
if (outputPath != null && StdFileTypes.JAVA.equals(typeManager.getFileTypeByFile(sourceFile))) {
final String outputDir = item.getOutputRootDirectory();
if (outputDir != null) {
if (!FileUtil.startsWith(outputPath, outputDir)) {
LOG.error(outputPath + " does not start with " + outputDir);
}
className = MakeUtil.relativeClassPathToQName(outputPath.substring(outputDir.length(), outputPath.length()), '/');
}
else {
// outputDir might be null for package-info.java (package annotation)
className = null;
}
}
else {
className = null;
}
if (LOG.isDebugEnabled()) {
LOG.debug("Putting: [outputPath, className, sourceFile] = [" + outputPath + ";" + className + ";" + sourceFile.getPresentableUrl() + "]");
}
cache.update(outputPath, className, sourceFile);
}
for (VirtualFile aFilesToRecompile : filesToRecompile) {
cache.markAsModified(aFilesToRecompile);
}
}
});
}
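  /**
   * Deletes the outdated output files collected by findFilesToDelete, registers the removed classes with
   * the dependency cache and keeps the translating compiler's cache in sync.
   */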
private static boolean syncOutputDir(final Set<String> urlsWithSourceRemoved,
final CompileContextImpl context,
final Set<String> toDelete,
final TranslatingCompilerStateCache cache,
final Set<File> outputDirectories) throws CacheCorruptedException {
DeleteHelper deleteHelper = new DeleteHelper(outputDirectories);
int total = toDelete.size();
final DependencyCache dependencyCache = context.getDependencyCache();
final boolean isTestMode = ApplicationManager.getApplication().isUnitTestMode();
context.getProgressIndicator().pushState();
try {
context.getProgressIndicator().setText(CompilerBundle.message("progress.synchronizing.output.directory"));
int current = 0;
boolean wereFilesDeleted = false;
for (final String outputPath : toDelete) {
context.getProgressIndicator().setFraction(((double)(++current)) / total);
if (deleteHelper.delete(outputPath)) {
wereFilesDeleted = true;
final String qName = cache.getClassName(outputPath);
if (qName != null) {
final int id = dependencyCache.getSymbolTable().getId(qName);
dependencyCache.addTraverseRoot(id);
if (urlsWithSourceRemoved.contains(outputPath)) {
dependencyCache.markSourceRemoved(id);
}
}
if (isTestMode) {
CompilerManagerImpl.addDeletedPath(outputPath);
}
cache.remove(outputPath);
}
else if (!new File(outputPath).exists()) {
cache.remove(outputPath);
}
}
return wereFilesDeleted;
}
finally {
deleteHelper.finish();
context.getProgressIndicator().popState();
}
}
private void findOutOfDateFiles(final TranslatingCompiler compiler, final boolean forceCompile,
final TranslatingCompilerStateCache cache,
final Set<VirtualFile> toCompile,
final CompileContext context) {
final CompilerConfiguration compilerConfiguration = CompilerConfiguration.getInstance(myProject);
for (VirtualFile file : context.getCompileScope().getFiles(null, true)) {
if (compiler.isCompilableFile(file, context)) {
if (!forceCompile && compilerConfiguration.isExcludedFromCompilation(file)) {
continue;
}
final String url = file.getUrl();
if (forceCompile || file.getTimeStamp() != cache.getSourceTimestamp(url)) {
if (LOG.isDebugEnabled()) {
LOG.debug("File is out-of-date: " + url + "; current timestamp = " + file.getTimeStamp() + "; stored timestamp = " + cache.getSourceTimestamp(url));
}
toCompile.add(file);
}
}
}
}
private void addDependentFiles(final PsiFile psiFile,
Set<VirtualFile> toCompile,
final TranslatingCompilerStateCache cache, Set<String> sourcesWithOutputRemoved,
TranslatingCompiler compiler,
CompileContextImpl context) {
final DependenciesBuilder builder = new ForwardDependenciesBuilder(myProject, new AnalysisScope(psiFile));
builder.analyze();
final Map<PsiFile, Set<PsiFile>> dependencies = builder.getDependencies();
final Set<PsiFile> dependentFiles = dependencies.get(psiFile);
if (dependentFiles != null && !dependentFiles.isEmpty()) {
for (final PsiFile dependentFile : dependentFiles) {
if (dependentFile instanceof PsiCompiledElement) {
continue;
}
final VirtualFile vFile = dependentFile.getVirtualFile();
if (vFile == null || toCompile.contains(vFile)) {
continue;
}
final String url = vFile.getUrl();
if (!sourcesWithOutputRemoved.contains(url)) {
if (vFile.getTimeStamp() == cache.getSourceTimestamp(url)) {
continue;
}
}
if (!compiler.isCompilableFile(vFile, context)) {
continue;
}
toCompile.add(vFile);
addDependentFiles(dependentFile, toCompile, cache, sourcesWithOutputRemoved, compiler, context);
}
}
}
private String getModuleOutputDirForFile(CompileContext context, VirtualFile file) {
final Module module = context.getModuleByFile(file);
if (module == null) {
      return null; // looks like the file has been invalidated
}
final ProjectFileIndex fileIndex = myProjectRootManager.getFileIndex();
return getModuleOutputPath(module, fileIndex.isInTestSourceContent(file));
}
// [mike] performance optimization - this method is accessed > 15,000 times in Aurora
private String getModuleOutputPath(final Module module, boolean inTestSourceContent) {
final Map<Module, String> map = inTestSourceContent ? myModuleTestOutputPaths : myModuleOutputPaths;
String path = map.get(module);
if (path == null) {
path = CompilerPaths.getModuleOutputPath(module, inTestSourceContent);
map.put(module, path);
}
return path;
}
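  /**
   * Generic processing loop for a FileProcessingCompiler: picks the items whose timestamp or validity state
   * changed, removes cache entries for files that are no longer produced, lets the compiler process the
   * remaining items and updates its cache afterwards.
   */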
private boolean processFiles(final FileProcessingCompilerAdapter adapter,
final boolean forceCompile,
final boolean checkScope,
final boolean onlyCheckStatus) throws ExitException {
final CompileContext context = adapter.getCompileContext();
final FileProcessingCompilerStateCache cache = getFileProcessingCompilerCache(adapter.getCompiler());
final FileProcessingCompiler.ProcessingItem[] items = adapter.getProcessingItems();
if (context.getMessageCount(CompilerMessageCategory.ERROR) > 0) {
return false;
}
final CompileScope scope = context.getCompileScope();
final List<FileProcessingCompiler.ProcessingItem> toProcess = new ArrayList<FileProcessingCompiler.ProcessingItem>();
final Set<String> allUrls = new HashSet<String>();
ApplicationManager.getApplication().runReadAction(new Runnable() {
public void run() {
for (FileProcessingCompiler.ProcessingItem item : items) {
final VirtualFile file = item.getFile();
if (file == null) {
LOG.assertTrue(false, "FileProcessingCompiler.ProcessingItem.getFile() must not return null: compiler " + adapter.getCompiler().getDescription());
}
final String url = file.getUrl();
allUrls.add(url);
if (!forceCompile && cache.getTimestamp(url) == file.getTimeStamp()) {
final ValidityState state = cache.getExtState(url);
final ValidityState itemState = item.getValidityState();
if (state != null ? state.equalsTo(itemState) : itemState == null) {
continue;
}
}
if (LOG.isDebugEnabled()) {
LOG.debug("Adding item to process: " + url + "; saved ts= " + cache.getTimestamp(url) + "; VFS ts=" + file.getTimeStamp());
}
toProcess.add(item);
}
}
});
final String[] urls = cache.getUrls();
final List<String> urlsToRemove = new ArrayList<String>();
if (urls.length > 0) {
context.getProgressIndicator().pushState();
context.getProgressIndicator().setText(CompilerBundle.message("progress.processing.outdated.files"));
ApplicationManager.getApplication().runReadAction(new Runnable() {
public void run() {
for (final String url : urls) {
if (!allUrls.contains(url)) {
if (!checkScope || scope.belongs(url)) {
urlsToRemove.add(url);
}
}
}
}
});
if (!onlyCheckStatus && !urlsToRemove.isEmpty()) {
for (final String url : urlsToRemove) {
adapter.processOutdatedItem(context, url, cache.getExtState(url));
cache.remove(url);
}
}
context.getProgressIndicator().popState();
}
if (onlyCheckStatus) {
if (urlsToRemove.isEmpty() && toProcess.isEmpty()) {
return false;
}
if (LOG.isDebugEnabled()) {
if (!urlsToRemove.isEmpty()) {
LOG.debug("Found urls to remove, compiler " + adapter.getCompiler().getDescription());
for (String url : urlsToRemove) {
LOG.debug("\t" + url);
}
}
if (!toProcess.isEmpty()) {
LOG.debug("Found items to compile, compiler " + adapter.getCompiler().getDescription());
for (FileProcessingCompiler.ProcessingItem item : toProcess) {
LOG.debug("\t" + item.getFile().getPresentableUrl());
}
}
}
throw new ExitException(ExitStatus.CANCELLED);
}
if (toProcess.isEmpty()) {
return false;
}
context.getProgressIndicator().pushState();
final FileProcessingCompiler.ProcessingItem[] processed =
adapter.process(toProcess.toArray(new FileProcessingCompiler.ProcessingItem[toProcess.size()]));
context.getProgressIndicator().popState();
if (processed.length > 0) {
context.getProgressIndicator().pushState();
context.getProgressIndicator().setText(CompilerBundle.message("progress.updating.caches"));
try {
List<VirtualFile> vFiles = new ArrayList<VirtualFile>(processed.length);
for (FileProcessingCompiler.ProcessingItem aProcessed : processed) {
final VirtualFile file = aProcessed.getFile();
vFiles.add(file);
if (LOG.isDebugEnabled()) {
LOG.debug("File processed by " + adapter.getCompiler().getDescription());
LOG.debug("\tFile processed " + file.getPresentableUrl() + "; ts=" + file.getTimeStamp());
}
}
LocalFileSystem.getInstance().refreshFiles(vFiles);
if (LOG.isDebugEnabled()) {
LOG.debug("Files after VFS refresh:");
for (VirtualFile file : vFiles) {
LOG.debug("\t" + file.getPresentableUrl() + "; ts=" + file.getTimeStamp());
}
}
ApplicationManager.getApplication().runReadAction(new Runnable() {
public void run() {
for (FileProcessingCompiler.ProcessingItem item : processed) {
cache.update(item.getFile(), item.getValidityState());
}
}
});
}
finally {
if (cache.isDirty()) {
context.getProgressIndicator().setText(CompilerBundle.message("progress.saving.caches"));
cache.save();
}
context.getProgressIndicator().popState();
}
}
return true;
}
public TranslatingCompilerStateCache getTranslatingCompilerCache(TranslatingCompiler compiler) {
Object cache = myCompilerToCacheMap.get(compiler);
if (cache == null) {
cache = new TranslatingCompilerStateCache(myCachesDirectoryPath, getCompilerIdString(compiler));
myCompilerToCacheMap.put(compiler, cache);
}
else {
LOG.assertTrue(cache instanceof TranslatingCompilerStateCache);
}
return (TranslatingCompilerStateCache)cache;
}
private FileProcessingCompilerStateCache getFileProcessingCompilerCache(FileProcessingCompiler compiler) {
Object cache = myCompilerToCacheMap.get(compiler);
if (cache == null) {
cache = new FileProcessingCompilerStateCache(myCachesDirectoryPath, getCompilerIdString(compiler), compiler);
myCompilerToCacheMap.put(compiler, cache);
}
else {
LOG.assertTrue(cache instanceof FileProcessingCompilerStateCache);
}
return (FileProcessingCompilerStateCache)cache;
}
private StateCache<ValidityState> getGeneratingCompilerCache(final GeneratingCompiler compiler) {
Object cache = myCompilerToCacheMap.get(compiler);
if (cache == null) {
cache = new StateCache<ValidityState>(myCachesDirectoryPath + File.separator + getCompilerIdString(compiler) + "_timestamp.dat") {
public ValidityState read(DataInputStream stream) throws IOException {
return compiler.createValidityState(stream);
}
public void write(ValidityState validityState, DataOutputStream stream) throws IOException {
validityState.save(stream);
}
};
myCompilerToCacheMap.put(compiler, cache);
}
return (StateCache<ValidityState>)cache;
}
private void dropInternalCache(Compiler compiler) {
myCompilerToCacheMap.remove(compiler);
}
public static String getCompilerIdString(Compiler compiler) {
@NonNls String description = compiler.getDescription();
return description.replaceAll("\\s+", "_").toLowerCase();
}
public void executeCompileTask(final CompileTask task,
final CompileScope scope,
final String contentName,
final Runnable onTaskFinished) {
final CompilerTask indicator =
new CompilerTask(myProject, CompilerWorkspaceConfiguration.getInstance(myProject).COMPILE_IN_BACKGROUND, contentName, false);
final CompileContextImpl compileContext = new CompileContextImpl(myProject, indicator, scope, null, this, false);
FileDocumentManager.getInstance().saveAllDocuments();
//noinspection HardCodedStringLiteral
ApplicationManager.getApplication().executeOnPooledThread(new Runnable() {
public void run() {
synchronized (CompilerManager.getInstance(myProject)) {
ProgressManager.getInstance().runProcess(new Runnable() {
public void run() {
try {
task.execute(compileContext);
}
catch (ProcessCanceledException ex) {
// suppressed
}
finally {
if (onTaskFinished != null) {
onTaskFinished.run();
}
}
}
}, compileContext.getProgressIndicator());
}
}
});
}
private boolean executeCompileTasks(CompileContext context, boolean beforeTasks) {
final CompilerManager manager = CompilerManager.getInstance(myProject);
final ProgressIndicator progressIndicator = context.getProgressIndicator();
try {
CompileTask[] tasks = beforeTasks ? manager.getBeforeTasks() : manager.getAfterTasks();
if (tasks.length > 0) {
progressIndicator.setText(beforeTasks
? CompilerBundle.message("progress.executing.precompile.tasks")
: CompilerBundle.message("progress.executing.postcompile.tasks"));
for (CompileTask task : tasks) {
if (!task.execute(context)) {
return false;
}
}
}
}
finally {
WindowManager.getInstance().getStatusBar(myProject).setInfo("");
if (progressIndicator instanceof CompilerTask) {
ApplicationManager.getApplication().invokeLater(new Runnable() {
public void run() {
((CompilerTask)progressIndicator).showCompilerContent();
}
});
}
}
return true;
}
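  /**
   * Verifies that the compiler setup is sane before a run: every affected module has a JDK and output paths,
   * missing output directories are created, cyclic module chunks share the same JDK and language level, and
   * each registered compiler accepts the scope.
   */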
// todo: add validation for module chunks: all modules that form a chunk must have the same JDK
private boolean validateCompilerConfiguration(final CompileScope scope, boolean checkOutputAndSourceIntersection) {
final Module[] scopeModules = scope.getAffectedModules()/*ModuleManager.getInstance(myProject).getModules()*/;
final List<String> modulesWithoutOutputPathSpecified = new ArrayList<String>();
final List<String> modulesWithoutJdkAssigned = new ArrayList<String>();
final Set<File> nonExistingOutputPaths = new HashSet<File>();
for (final Module module : scopeModules) {
final boolean hasSources = hasSources(module, false);
final boolean hasTestSources = hasSources(module, true);
if (!hasSources && !hasTestSources) {
        // If the module contains no sources, there is no need to select a JDK or output directory for it (SCR #19333)
        // todo: there may still be problems with this approach if some generated files are attributed to this module
continue;
}
final ProjectJdk jdk = ModuleRootManager.getInstance(module).getJdk();
if (jdk == null) {
modulesWithoutJdkAssigned.add(module.getName());
}
final String outputPath = getModuleOutputPath(module, false);
final String testsOutputPath = getModuleOutputPath(module, true);
if (outputPath == null && testsOutputPath == null) {
modulesWithoutOutputPathSpecified.add(module.getName());
}
else {
if (outputPath != null) {
final File file = new File(outputPath.replace('/', File.separatorChar));
if (!file.exists()) {
nonExistingOutputPaths.add(file);
}
}
else {
if (hasSources) {
modulesWithoutOutputPathSpecified.add(module.getName());
}
}
if (testsOutputPath != null) {
final File f = new File(testsOutputPath.replace('/', File.separatorChar));
if (!f.exists()) {
nonExistingOutputPaths.add(f);
}
}
else {
if (hasTestSources) {
modulesWithoutOutputPathSpecified.add(module.getName());
}
}
}
}
if (!modulesWithoutJdkAssigned.isEmpty()) {
showNotSpecifiedError("error.jdk.not.specified", modulesWithoutJdkAssigned, ClasspathEditor.NAME);
return false;
}
if (!modulesWithoutOutputPathSpecified.isEmpty()) {
showNotSpecifiedError("error.output.not.specified", modulesWithoutOutputPathSpecified, ContentEntriesEditor.NAME);
return false;
}
if (!nonExistingOutputPaths.isEmpty()) {
for (File file : nonExistingOutputPaths) {
final boolean succeeded = file.mkdirs();
if (!succeeded) {
Messages.showMessageDialog(myProject, CompilerBundle.message("error.failed.to.create.directory", file.getPath()),
CommonBundle.getErrorTitle(), Messages.getErrorIcon());
return false;
}
}
final Boolean refreshSuccess = ApplicationManager.getApplication().runWriteAction(new Computable<Boolean>() {
public Boolean compute() {
LocalFileSystem.getInstance().refreshIoFiles(nonExistingOutputPaths);
for (File file : nonExistingOutputPaths) {
if (LocalFileSystem.getInstance().findFileByIoFile(file) == null) {
return Boolean.FALSE;
}
}
return Boolean.TRUE;
}
});
if (!refreshSuccess.booleanValue()) {
return false;
}
dropScopesCaches();
}
if (checkOutputAndSourceIntersection) {
if (myShouldClearOutputDirectory) {
if (!validateOutputAndSourcePathsIntersection()) {
return false;
}
}
}
final List<Chunk<Module>> chunks = ModuleCompilerUtil.getSortedModuleChunks(myProject, scopeModules);
for (final Chunk<Module> chunk : chunks) {
final Set<Module> chunkModules = chunk.getNodes();
if (chunkModules.size() <= 1) {
continue; // no need to check one-module chunks
}
ProjectJdk jdk = null;
LanguageLevel languageLevel = null;
for (final Module module : chunkModules) {
final ProjectJdk moduleJdk = ModuleRootManager.getInstance(module).getJdk();
if (jdk == null) {
jdk = moduleJdk;
}
else {
if (!jdk.equals(moduleJdk)) {
showCyclicModulesHaveDifferentJdksError(chunkModules.toArray(new Module[chunkModules.size()]));
return false;
}
}
LanguageLevel moduleLanguageLevel = module.getEffectiveLanguageLevel();
if (languageLevel == null) {
languageLevel = moduleLanguageLevel;
}
else {
if (!languageLevel.equals(moduleLanguageLevel)) {
showCyclicModulesHaveDifferentLanguageLevel(chunkModules.toArray(new Module[chunkModules.size()]));
return false;
}
}
}
}
final Compiler[] allCompilers = CompilerManager.getInstance(myProject).getCompilers(Compiler.class);
for (Compiler compiler : allCompilers) {
if (!compiler.validateConfiguration(scope)) {
return false;
}
}
return true;
}
private void showCyclicModulesHaveDifferentLanguageLevel(Module[] modulesInChunk) {
LOG.assertTrue(modulesInChunk.length > 0);
String moduleNameToSelect = modulesInChunk[0].getName();
final String moduleNames = getModulesString(modulesInChunk);
Messages.showMessageDialog(myProject, CompilerBundle.message("error.chunk.modules.must.have.same.language.level", moduleNames),
CommonBundle.getErrorTitle(), Messages.getErrorIcon());
showConfigurationDialog(moduleNameToSelect, null);
}
private void showCyclicModulesHaveDifferentJdksError(Module[] modulesInChunk) {
LOG.assertTrue(modulesInChunk.length > 0);
String moduleNameToSelect = modulesInChunk[0].getName();
final String moduleNames = getModulesString(modulesInChunk);
Messages.showMessageDialog(myProject, CompilerBundle.message("error.chunk.modules.must.have.same.jdk", moduleNames),
CommonBundle.getErrorTitle(), Messages.getErrorIcon());
showConfigurationDialog(moduleNameToSelect, null);
}
private static String getModulesString(Module[] modulesInChunk) {
final StringBuilder moduleNames = StringBuilderSpinAllocator.alloc();
try {
for (Module module : modulesInChunk) {
if (moduleNames.length() > 0) {
moduleNames.append("\n");
}
moduleNames.append("\"").append(module.getName()).append("\"");
}
return moduleNames.toString();
}
finally {
StringBuilderSpinAllocator.dispose(moduleNames);
}
}
private static boolean hasSources(Module module, boolean checkTestSources) {
final ContentEntry[] contentEntries = ModuleRootManager.getInstance(module).getContentEntries();
for (final ContentEntry contentEntry : contentEntries) {
final SourceFolder[] sourceFolders = contentEntry.getSourceFolders();
for (final SourceFolder sourceFolder : sourceFolders) {
if (sourceFolder.getFile() == null) {
continue; // skip invalid source folders
}
if (checkTestSources) {
if (sourceFolder.isTestSource()) {
return true;
}
}
else {
if (!sourceFolder.isTestSource()) {
return true;
}
}
}
}
return false;
}
private void showNotSpecifiedError(final @NonNls String resourceId, List<String> modules, String tabNameToSelect) {
String nameToSelect = null;
final int maxModulesToShow = 10;
final StringBuilder names = StringBuilderSpinAllocator.alloc();
final String message;
try {
for (String name : modules.size() > maxModulesToShow ? modules.subList(0, maxModulesToShow) : modules) {
if (nameToSelect == null) {
nameToSelect = name;
}
if (names.length() > 0) {
names.append(",\n");
}
names.append("\"");
names.append(name);
names.append("\"");
}
if (modules.size() > maxModulesToShow) {
names.append(",\n...");
}
message = CompilerBundle.message(resourceId, modules.size(), names.toString());
}
finally {
StringBuilderSpinAllocator.dispose(names);
}
if (ApplicationManager.getApplication().isUnitTestMode()) {
LOG.error(message);
}
Messages.showMessageDialog(myProject, message, CommonBundle.getErrorTitle(), Messages.getErrorIcon());
showConfigurationDialog(nameToSelect, tabNameToSelect);
}
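  /**
   * Warns the user when a source root lies under (or contains) an output directory; if the user chooses to
   * proceed, output directories will not be cleared to avoid deleting sources.
   */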
private boolean validateOutputAndSourcePathsIntersection() {
final Module[] allModules = ModuleManager.getInstance(myProject).getModules();
final VirtualFile[] outputPaths = CompilerPathsEx.getOutputDirectories(allModules);
final Set<VirtualFile> affectedOutputPaths = new HashSet<VirtualFile>();
    for (Module module : allModules) {
      final ModuleRootManager rootManager = ModuleRootManager.getInstance(module);
final VirtualFile[] sourceRoots = rootManager.getSourceRoots();
for (final VirtualFile outputPath : outputPaths) {
for (VirtualFile sourceRoot : sourceRoots) {
if (VfsUtil.isAncestor(outputPath, sourceRoot, true) || VfsUtil.isAncestor(sourceRoot, outputPath, false)) {
affectedOutputPaths.add(outputPath);
}
}
}
}
if (!affectedOutputPaths.isEmpty()) {
final StringBuilder paths = new StringBuilder();
for (final VirtualFile affectedOutputPath : affectedOutputPaths) {
        if (paths.length() > 0) {
paths.append("\n");
}
paths.append(affectedOutputPath.getPath().replace('/', File.separatorChar));
}
final int answer = Messages.showOkCancelDialog(myProject,
CompilerBundle.message("warning.sources.under.output.paths", paths.toString()),
CommonBundle.getErrorTitle(), Messages.getWarningIcon());
if (answer == 0) {
myShouldClearOutputDirectory = false;
return true;
}
else {
return false;
}
}
return true;
}
private void showConfigurationDialog(String moduleNameToSelect, String tabNameToSelect) {
ModulesConfigurator.showDialog(myProject, moduleNameToSelect, tabNameToSelect, false);
}
private VirtualFile lookupVFile(final GeneratingCompiler compiler, final Module module, final boolean forTestSources) {
final String path = getGenerationOutputPath(compiler, module, forTestSources);
final File file = new File(path);
final VirtualFile vFile;
if (file.mkdirs()) {
vFile = LocalFileSystem.getInstance().refreshAndFindFileByIoFile(file);
}
else {
vFile = LocalFileSystem.getInstance().findFileByPath(path);
}
return vFile;
}
}
|
package org.mifos.ui.personnel.controller;
import javax.servlet.http.HttpServletRequest;
import org.mifos.application.admin.servicefacade.PersonnelServiceFacade;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.servlet.ModelAndView;
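/**
 * Spring MVC controller that lets the logged-in user switch the UI locale: delegates to
 * {@link PersonnelServiceFacade} and exposes the current locale id and the list of available locales
 * to the view.
 */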
@Controller
public class PersonnelController {
private static final String LOCALE_ID = "id";
@Autowired
private PersonnelServiceFacade personnelServiceFacade;
@RequestMapping("/changeLocale.ftl")
public ModelAndView changeUserLocale(@RequestParam(value = LOCALE_ID, required = false) Short id, HttpServletRequest request) {
Short currentLocaleId = personnelServiceFacade.changeUserLocale(id, request);
ModelAndView mav = new ModelAndView();
mav.addObject("CURRENT_LOCALE_ID", currentLocaleId);
mav.addObject("LOCALE_LIST", personnelServiceFacade.getDisplayLocaleList());
mav.setViewName("personnel/changeLocale");
return mav;
}
}
|
package com.tidyjava.bp.sitemap;
import com.tidyjava.bp.post.Post;
import com.tidyjava.bp.post.PostReader;
import com.tidyjava.bp.post.Tag;
import com.tidyjava.bp.util.DateUtils;
import cz.jiripinkas.jsitemapgenerator.WebPage;
import cz.jiripinkas.jsitemapgenerator.WebPageBuilder;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
import java.util.Date;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import static com.tidyjava.bp.util.DateUtils.toDate;
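/**
 * Builds the XML sitemap for the blog: the home page (highest priority), one entry per post and one entry
 * per tag page, with last-modification dates taken from the relevant posts.
 */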
@Component
public class SitemapGenerator {
private static final double MEDIUM_PRIORITY = 0.7;
private static final double LOW_PRIORITY = 0.4;
@Value("${blog.url}")
private String blogUrl;
@Autowired
private PostReader postReader;
String generate() {
cz.jiripinkas.jsitemapgenerator.generator.SitemapGenerator sitemapGenerator = new cz.jiripinkas.jsitemapgenerator.generator.SitemapGenerator(blogUrl);
List<Post> allPosts = postReader.readAll();
sitemapGenerator.addPage(homePage(allPosts));
sitemapGenerator.addPages(postPages(allPosts));
sitemapGenerator.addPages(tagPages(allPosts));
return sitemapGenerator.constructSitemapString();
}
private WebPage homePage(List<Post> posts) {
return new WebPageBuilder()
.name("/")
.lastMod(lastModification(posts))
.changeFreqDaily()
.priorityMax()
.build();
}
private List<WebPage> postPages(List<Post> posts) {
return posts.stream()
.map(this::toWebPage)
.collect(Collectors.toList());
}
private WebPage toWebPage(Post post) {
return new WebPageBuilder()
.name(post.getUrl())
.lastMod(toDate(post.getDate()))
.changeFreqMonthly()
.priority(MEDIUM_PRIORITY)
.build();
}
private List<WebPage> tagPages(List<Post> posts) {
Set<Tag> tags = getTags(posts);
return tags.stream()
.map(tag -> toWebPage(tag, posts))
.collect(Collectors.toList());
}
private Set<Tag> getTags(List<Post> posts) {
return posts.stream()
.flatMap(post -> post.getTags().stream())
.collect(Collectors.toSet());
}
private WebPage toWebPage(Tag tag, List<Post> posts) {
List<Post> postsWithTag = findByTag(tag, posts);
return new WebPageBuilder()
.name(tag.getUrl())
.lastMod(lastModification(postsWithTag))
.changeFreqWeekly()
.priority(LOW_PRIORITY)
.build();
}
private List<Post> findByTag(Tag tag, List<Post> posts) {
return posts.stream()
.filter(post -> post.hasTag(tag))
.collect(Collectors.toList());
}
private Date lastModification(List<Post> posts) {
return posts.stream()
.map(Post::getDate)
.map(DateUtils::toDate)
.max(Date::compareTo)
.orElseThrow(IllegalStateException::new);
}
}
|