answer stringlengths 17 10.2M |
|---|
package com.vaadin.terminal.gwt.client.ui;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import com.google.gwt.dom.client.Document;
import com.google.gwt.dom.client.Node;
import com.google.gwt.dom.client.NodeList;
import com.google.gwt.dom.client.ObjectElement;
import com.google.gwt.dom.client.Style;
import com.google.gwt.dom.client.Style.Unit;
import com.google.gwt.event.dom.client.DomEvent.Type;
import com.google.gwt.event.shared.EventHandler;
import com.google.gwt.event.shared.HandlerRegistration;
import com.google.gwt.user.client.DOM;
import com.google.gwt.user.client.Element;
import com.google.gwt.user.client.Event;
import com.google.gwt.user.client.ui.HTML;
import com.vaadin.terminal.gwt.client.ApplicationConnection;
import com.vaadin.terminal.gwt.client.BrowserInfo;
import com.vaadin.terminal.gwt.client.Paintable;
import com.vaadin.terminal.gwt.client.UIDL;
import com.vaadin.terminal.gwt.client.Util;
import com.vaadin.terminal.gwt.client.VConsole;
import com.vaadin.terminal.gwt.client.VTooltip;
public class VEmbedded extends HTML implements Paintable {
public static final String CLICK_EVENT_IDENTIFIER = "click";
private static String CLASSNAME = "v-embedded";
private String height;
private String width;
private Element browserElement;
private String type;
private ApplicationConnection client;
private final ClickEventHandler clickEventHandler = new ClickEventHandler(
this, CLICK_EVENT_IDENTIFIER) {
@Override
protected <H extends EventHandler> HandlerRegistration registerHandler(
H handler, Type<H> type) {
return addDomHandler(handler, type);
}
};
public VEmbedded() {
    // Base style name only; type-specific suffixes (-image, -browser,
    // -flash, -svg) are added later in updateFromUIDL.
    setStyleName(CLASSNAME);
}
/**
 * Applies the server-side state to this widget. Depending on the UIDL
 * attributes the embedded content is rendered as an IMG element ("image"
 * type), an IFRAME ("browser" type) or an OBJECT element (flash / SVG
 * mimetypes). Existing DOM nodes are reused where possible to avoid
 * flicker and reloads.
 */
public void updateFromUIDL(UIDL uidl, ApplicationConnection client) {
    if (client.updateComponent(this, uidl, true)) {
        // Common caption/size/visibility handling consumed the update.
        return;
    }
    this.client = client;
    boolean clearBrowserElement = true;
    clickEventHandler.handleEventHandlerRegistration(client);
    if (uidl.hasAttribute("type")) {
        type = uidl.getStringAttribute("type");
        if (type.equals("image")) {
            addStyleName(CLASSNAME + "-image");
            Element el = null;
            boolean created = false;
            // Reuse an existing single IMG child instead of recreating it
            // on every update (avoids image flicker/reload).
            NodeList<Node> nodes = getElement().getChildNodes();
            if (nodes != null && nodes.getLength() == 1) {
                Node n = nodes.getItem(0);
                if (n.getNodeType() == Node.ELEMENT_NODE) {
                    Element e = (Element) n;
                    if (e.getTagName().equals("IMG")) {
                        el = e;
                    }
                }
            }
            if (el == null) {
                setHTML("");
                el = DOM.createImg();
                created = true;
                client.addPngFix(el);
                // ONLOAD is needed so dynamic size can be recomputed once
                // the image data has arrived (see onBrowserEvent).
                DOM.sinkEvents(el, Event.ONLOAD);
            }
            // Set attributes
            Style style = el.getStyle();
            String w = uidl.getStringAttribute("width");
            if (w != null) {
                style.setProperty("width", w);
            } else {
                style.setProperty("width", "");
            }
            String h = uidl.getStringAttribute("height");
            if (h != null) {
                style.setProperty("height", h);
            } else {
                style.setProperty("height", "");
            }
            DOM.setElementProperty(el, "src", getSrc(uidl, client));
            if (created) {
                // insert in dom late
                getElement().appendChild(el);
            }
            /*
             * Sink tooltip events so tooltip is displayed when hovering the
             * image.
             */
            sinkEvents(VTooltip.TOOLTIP_EVENTS);
        } else if (type.equals("browser")) {
            addStyleName(CLASSNAME + "-browser");
            if (browserElement == null) {
                // First render: create the iframe once.
                setHTML("<iframe width=\"100%\" height=\"100%\" frameborder=\"0\" allowTransparency=\"true\" src=\""
                        + getSrc(uidl, client)
                        + "\" name=\""
                        + uidl.getId() + "\"></iframe>");
                browserElement = DOM.getFirstChild(getElement());
            } else {
                // Subsequent renders: only update the URL.
                DOM.setElementAttribute(browserElement, "src",
                        getSrc(uidl, client));
            }
            clearBrowserElement = false;
        } else {
            VConsole.log("Unknown Embedded type '" + type + "'");
        }
    } else if (uidl.hasAttribute("mimetype")) {
        final String mime = uidl.getStringAttribute("mimetype");
        if (mime.equals("application/x-shockwave-flash")) {
            // Handle embedding of Flash
            setHTML(createFlashEmbed(uidl));
        } else if (mime.equals("image/svg+xml")) {
            addStyleName(CLASSNAME + "-svg");
            String data;
            Map<String, String> parameters = getParameters(uidl);
            if (parameters.get("data") == null) {
                // No inline data: reference the SVG resource by URL.
                data = getSrc(uidl, client);
            } else {
                // Inline SVG content shipped as a data URI.
                data = "data:image/svg+xml," + parameters.get("data");
            }
            setHTML("");
            ObjectElement obj = Document.get().createObjectElement();
            obj.setType(mime);
            obj.setData(data);
            if (width != null) {
                obj.getStyle().setProperty("width", "100%");
            }
            if (height != null) {
                obj.getStyle().setProperty("height", "100%");
            }
            getElement().appendChild(obj);
        } else {
            VConsole.log("Unknown Embedded mimetype '" + mime + "'");
        }
    } else {
        VConsole.log("Unknown Embedded; no type or mimetype attribute");
    }
    if (clearBrowserElement) {
        // Forget the iframe reference when the type changed away from
        // "browser" so a later "browser" update recreates it.
        browserElement = null;
    }
}
/**
 * Creates the Object and Embed tags for the Flash plugin so it works
 * cross-browser.
 *
 * @param uidl
 *            The UIDL
 * @return Tags concatenated into a string
 */
private String createFlashEmbed(UIDL uidl) {
    addStyleName(CLASSNAME + "-flash");
    /*
     * To ensure cross-browser compatibility we are using the twice-cooked
     * method to embed flash i.e. we add a OBJECT tag for IE ActiveX and
     * inside it a EMBED for all other browsers.
     */
    StringBuilder html = new StringBuilder();
    // Start the object tag (IE / ActiveX path)
    html.append("<object ");
    html.append("classid=\"clsid:D27CDB6E-AE6D-11cf-96B8-444553540000\" ");
    html.append("codebase=\"http://download.macromedia.com/pub/shockwave/cabs/flash/swflash.cab#version=9,0,0,0\" ");
    // Add width and height
    html.append("width=\"" + width + "\" ");
    html.append("height=\"" + height + "\" ");
    html.append("type=\"application/x-shockwave-flash\" ");
    // End object tag
    html.append(">");
    // Ensure we have a movie parameter (defaults to the src attribute)
    Map<String, String> parameters = getParameters(uidl);
    if (parameters.get("movie") == null) {
        parameters.put("movie", getSrc(uidl, client));
    }
    // Add parameters to OBJECT (entrySet avoids a lookup per key)
    for (Map.Entry<String, String> param : parameters.entrySet()) {
        html.append("<param ");
        html.append("name=\"" + escapeAttribute(param.getKey()) + "\" ");
        html.append("value=\"" + escapeAttribute(param.getValue())
                + "\" ");
        html.append("/>");
    }
    // Build inner EMBED tag for non-IE browsers; the src is escaped since
    // translated URIs may contain characters special in HTML attributes.
    html.append("<embed ");
    html.append("src=\"" + escapeAttribute(getSrc(uidl, client)) + "\" ");
    html.append("width=\"" + width + "\" ");
    html.append("height=\"" + height + "\" ");
    html.append("type=\"application/x-shockwave-flash\" ");
    // Add the parameters to the Embed as attributes
    for (Map.Entry<String, String> param : parameters.entrySet()) {
        html.append(escapeAttribute(param.getKey()));
        html.append("=");
        html.append("\"" + escapeAttribute(param.getValue()) + "\"");
    }
    // End embed tag
    html.append("</embed>");
    // End object tag
    html.append("</object>");
    return html.toString();
}
/**
* Escapes the string so it is safe to write inside an HTML attribute.
*
* @param attribute
* The string to escape
* @return An escaped version of <literal>attribute</literal>.
*/
private String escapeAttribute(String attribute) {
attribute = attribute.replace("\"", """);
attribute = attribute.replace("'", "&
attribute = attribute.replace(">", ">");
attribute = attribute.replace("<", "<");
attribute = attribute.replace("&", "&");
return attribute;
}
/**
 * Collects every "embeddedparam" child of the given UIDL into a
 * name -&gt; value map.
 *
 * @param uidl the UIDL whose children are scanned
 * @return map from parameter name to parameter value
 */
private static Map<String, String> getParameters(UIDL uidl) {
    final Map<String, String> result = new HashMap<String, String>();
    final Iterator<Object> it = uidl.getChildIterator();
    while (it.hasNext()) {
        final Object next = it.next();
        // Skip non-UIDL children (e.g. plain text nodes)
        if (!(next instanceof UIDL)) {
            continue;
        }
        final UIDL param = (UIDL) next;
        if ("embeddedparam".equals(param.getTag())) {
            result.put(param.getStringAttribute("name"),
                    param.getStringAttribute("value"));
        }
    }
    return result;
}
/**
 * Helper to return the translated src-attribute from the embedded's UIDL.
 *
 * @param uidl the component UIDL carrying the "src" attribute
 * @param client connection used to translate Vaadin URIs
 * @return the translated URL, or an empty string when translation yields null
 */
private String getSrc(UIDL uidl, ApplicationConnection client) {
    final String translated =
            client.translateVaadinUri(uidl.getStringAttribute("src"));
    return translated == null ? "" : translated;
}
@Override
public void setWidth(String width) {
    this.width = width;
    if (isDynamicHeight()) {
        // With undefined height a width change can reflow the content and
        // change the height; detect that and notify the parent layout.
        int oldHeight = getOffsetHeight();
        super.setWidth(width);
        int newHeight = getOffsetHeight();
        /*
         * Must notify parent if the height changes as a result of a width
         * change
         */
        if (oldHeight != newHeight) {
            Util.notifyParentOfSizeChange(this, false);
        }
    } else {
        super.setWidth(width);
    }
}
/** @return true when no explicit width is set (undefined/dynamic width). */
private boolean isDynamicWidth() {
    return width == null || width.length() == 0;
}

/** @return true when no explicit height is set (undefined/dynamic height). */
private boolean isDynamicHeight() {
    return height == null || height.length() == 0;
}
@Override
public void setHeight(String height) {
    // Remember the raw height string so isDynamicHeight() stays accurate.
    this.height = height;
    super.setHeight(height);
}
@Override
protected void onDetach() {
    if (BrowserInfo.get().isIE()) {
        // Force browser to fire unload event when component is detached
        // from the view (IE doesn't do this automatically)
        if (browserElement != null) {
            DOM.setElementAttribute(browserElement, "src",
                    "javascript:false");
        }
    }
    super.onDetach();
}
@Override
public void onBrowserEvent(Event event) {
    super.onBrowserEvent(event);
    if (DOM.eventGetType(event) == Event.ONLOAD) {
        // An image has finished loading: its natural size is now known,
        // so recompute any undefined dimensions and tell the parent.
        if ("image".equals(type)) {
            updateElementDynamicSizeFromImage();
        }
        Util.notifyParentOfSizeChange(this, true);
    }
    // 'client' is only assigned in updateFromUIDL; guard against events
    // delivered before the first server update to avoid an NPE.
    if (client != null) {
        client.handleTooltipEvent(event, this);
    }
}
/**
 * Updates the size of the embedded component's element if size is
 * undefined. Without this, embeddeds containing images will remain the
 * wrong size in certain cases (e.g. #6304).
 */
private void updateElementDynamicSizeFromImage() {
    final Style style = getElement().getStyle();
    if (isDynamicWidth()) {
        style.setWidth(
                getElement().getFirstChildElement().getOffsetWidth(),
                Unit.PX);
    }
    if (isDynamicHeight()) {
        style.setHeight(
                getElement().getFirstChildElement().getOffsetHeight(),
                Unit.PX);
    }
}
}
package com.vinsol.expensetracker.edit;
import java.util.Calendar;
import android.app.Activity;
import android.content.Context;
import android.content.DialogInterface;
import android.content.DialogInterface.OnDismissListener;
import android.content.Intent;
import android.os.Bundle;
import android.view.KeyEvent;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.View.OnFocusChangeListener;
import android.view.View.OnKeyListener;
import android.view.WindowManager;
import android.view.inputmethod.InputMethodManager;
import android.widget.Button;
import android.widget.EditText;
import android.widget.TextView;
import com.flurry.android.FlurryAgent;
import com.vinsol.expensetracker.Constants;
import com.vinsol.expensetracker.DatabaseAdapter;
import com.vinsol.expensetracker.R;
import com.vinsol.expensetracker.helpers.ConfirmSaveEntryDialog;
import com.vinsol.expensetracker.helpers.DateHandler;
import com.vinsol.expensetracker.helpers.DateHelper;
import com.vinsol.expensetracker.helpers.DeleteDialog;
import com.vinsol.expensetracker.helpers.DisplayDate;
import com.vinsol.expensetracker.helpers.FileHelper;
import com.vinsol.expensetracker.helpers.LocationHelper;
import com.vinsol.expensetracker.helpers.SharedPreferencesHelper;
import com.vinsol.expensetracker.helpers.StringProcessing;
import com.vinsol.expensetracker.listing.ExpenseListing;
import com.vinsol.expensetracker.listing.ExpenseSubListing;
import com.vinsol.expensetracker.models.Entry;
abstract class EditAbstract extends Activity implements OnClickListener {
protected Entry mEditList;
protected boolean setLocation = false;
protected EditText editAmount;
protected EditText editTag;
protected Bundle intentExtras;
protected boolean setUnknown = false;
protected int typeOfEntryFinished;
protected int typeOfEntryUnfinished;
protected int typeOfEntry;
protected boolean isChanged = false;
protected DatabaseAdapter mDatabaseAdapter;
protected TextView editHeaderTitle;
protected TextView dateBarDateview;
protected String dateViewString;
protected Button editDelete;
protected Button editSaveEntry;
protected Entry entry;
protected FileHelper fileHelper;
@Override
protected void onStart() {
    super.onStart();
    // Begin a Flurry analytics session for this activity.
    FlurryAgent.onStartSession(this, getString(R.string.flurry_key));
}
@Override
protected void onStop() {
    super.onStop();
    // Close the Flurry analytics session opened in onStart.
    FlurryAgent.onEndSession(this);
}
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.edit_page);
    entry = new Entry();
    // Bind views from the shared edit layout.
    editAmount = (EditText) findViewById(R.id.edit_amount);
    editHeaderTitle = (TextView) findViewById(R.id.header_title);
    editTag = (EditText) findViewById(R.id.edit_tag);
    dateBarDateview = (TextView) findViewById(R.id.edit_date_bar_dateview);
    editSaveEntry = (Button) findViewById(R.id.edit_save_entry);
    editDelete = (Button) findViewById(R.id.edit_delete);
    mDatabaseAdapter = new DatabaseAdapter(this);
    editSaveEntry.setOnClickListener(this);
    editDelete.setOnClickListener(this);
    // Place the cursor at the end of any pre-filled amount.
    editAmount.setSelection(editAmount.getText().length());
    editAmount.setOnKeyListener(focusTagOnEnter);
    editAmount.setOnFocusChangeListener(focusChangeListener);
    // NOTE(review): everything below references 'toSave', which is not
    // declared anywhere in this visible chunk, and 'intentExtras', which is
    // not yet initialized here. This looks like save/finish logic merged
    // into onCreate by a bad paste or extraction error — verify against
    // version control before relying on this method.
    mDatabaseAdapter.open();
    mDatabaseAdapter.editEntryTable(toSave);
    mDatabaseAdapter.close();
    if(!intentExtras.containsKey(Constants.IS_COMING_FROM_SHOW_PAGE)) {
        // Not launched from the show page: return to the full listing,
        // highlighting the saved entry.
        Intent intentExpenseListing = new Intent(this, ExpenseListing.class);
        Bundle mToHighLight = new Bundle();
        mToHighLight.putString(Constants.HIGHLIGHT, toSave.id);
        intentExpenseListing.putExtras(mToHighLight);
        intentExpenseListing.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
        if(!intentExtras.containsKey(Constants.POSITION)) {
            startActivity(intentExpenseListing);
        } else {
            // Caller expects a result carrying the updated entry/position.
            mToHighLight.putInt(Constants.POSITION, intentExtras.getInt(Constants.POSITION));
            mToHighLight.putParcelable(Constants.ENTRY_LIST_EXTRA, getListOnResult(toSave));
            setActivityResult(mToHighLight);
            finish();
        }
    } else {
        Bundle tempBundle = new Bundle();
        tempBundle.putParcelable(Constants.ENTRY_LIST_EXTRA, getListOnResult(toSave));
        if(intentExtras.containsKey(Constants.POSITION)) {
            if(checkDataModified()) {
                tempBundle.putInt(Constants.POSITION , intentExtras.getInt(Constants.POSITION));
                tempBundle.putBoolean(Constants.DATA_CHANGED, true);
            }
        }
        saveEntryStartIntent(tempBundle);
    }
    finish();
}
/**
 * Builds the RESULT_OK intent for whichever listing activity launched this
 * editor, attaching the given bundle only when the entry was actually
 * modified.
 */
private void setActivityResult(Bundle bundle) {
    Intent intent = null;
    // Target the listing variant that started us.
    if(intentExtras.containsKey(Constants.IS_COMING_FROM_EXPENSE_LISTING)) {
        intent = new Intent(this, ExpenseListing.class);
    } else if(intentExtras.containsKey(Constants.IS_COMING_FROM_EXPENSE_SUB_LISTING)) {
        intent = new Intent(this, ExpenseSubListing.class);
    }
    if(intent != null) {
        isChanged = checkDataModified();
        if(isChanged) {
            // Only ship the changed-data payload when something was edited.
            bundle.putBoolean(Constants.DATA_CHANGED, isChanged);
            intentExtras.putAll(bundle);
        }
        intent.putExtras(intentExtras);
        setResult(Activity.RESULT_OK, intent);
    }
}
/**
 * Reports whether the user changed the tag, amount or date of the entry
 * being edited, compared to the stored values in {@code mEditList}.
 */
protected Boolean checkDataModified() {
    Calendar mCalendar = Calendar.getInstance();
    mCalendar.setTimeInMillis(mEditList.timeInMillis);
    mCalendar.setFirstDayOfWeek(Calendar.MONDAY);
    // TextView.getText() returns a CharSequence whose equals() is identity
    // based for spanned text, so comparing it directly to a String was
    // always false; compare the plain string contents instead.
    boolean dateModified = !dateBarDateview.getText().toString()
            .equals(new DisplayDate(mCalendar).getDisplayDate());
    return isTagModified() || isAmountModified() || dateModified;
}
/**
 * @return true when the amount field differs from the stored amount.
 *         Placeholder stored amounts ("?" or empty) never count as
 *         modified, mirroring the original decision table.
 */
private boolean isAmountModified() {
    // Editable.equals(String) compares references, never contents, so the
    // original empty-check could not fire; use toString() for both sides.
    String entered = editAmount.getText().toString();
    String stored = mEditList.amount;
    if (stored.equals("?") || stored.equals("")) {
        // Placeholder/empty stored amounts are never treated as modified.
        return false;
    }
    if (entered.equals("")) {
        // A real stored amount was cleared by the user.
        return true;
    }
    // NOTE(review): assumes 'entered' parses as a double (the field is a
    // numeric EditText) — a non-numeric value would throw here, as before.
    return Double.parseDouble(entered) != Double.parseDouble(stored);
}
/**
 * @return true when the tag/description field differs from the stored
 *         description. Default or placeholder descriptions (the finished/
 *         unfinished resource strings, or empty) never count as modified.
 */
private boolean isTagModified() {
    // Editable.equals(String) compares references, never contents, so both
    // comparisons in the original always evaluated false; use toString().
    String entered = editTag.getText().toString();
    String stored = mEditList.description;
    if (stored.equals(getString(typeOfEntryFinished))
            || stored.equals(getString(typeOfEntryUnfinished))
            || stored.equals("")) {
        // Default/placeholder descriptions are never treated as modified.
        return false;
    }
    if (entered.equals("")) {
        // A real stored description was cleared by the user.
        return true;
    }
    return !entered.equals(stored);
}
@Override
public void onClick(View v) {
setSoftPanToInputMode();
package net.tomp2p.rcon;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeoutException;
import net.tomp2p.connection.ConnectionConfiguration;
import net.tomp2p.connection.DefaultConnectionConfiguration;
import net.tomp2p.connection.Dispatcher;
import net.tomp2p.connection.PeerBean;
import net.tomp2p.connection.PeerConnection;
import net.tomp2p.connection.Responder;
import net.tomp2p.futures.BaseFutureAdapter;
import net.tomp2p.futures.FutureDone;
import net.tomp2p.futures.FuturePeerConnection;
import net.tomp2p.futures.FutureResponse;
import net.tomp2p.message.Message;
import net.tomp2p.message.Message.Type;
import net.tomp2p.message.NeighborSet;
import net.tomp2p.p2p.Peer;
import net.tomp2p.peers.Number160;
import net.tomp2p.peers.PeerAddress;
import net.tomp2p.relay.RelayForwarderRPC;
import net.tomp2p.relay.RelayUtils;
import net.tomp2p.rpc.DispatchHandler;
import net.tomp2p.rpc.RPC;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* This RPC handles two things. First of all, it makes sure that messaging via
* reverse connection setup is possible. Second, it is also able to keep the
* established @link {@link PeerConnection} and store it to the @link
* {@link PeerBean}.
*
* @author jonaswagner
*
*/
public class RconRPC extends DispatchHandler {
private static final Logger LOG = LoggerFactory.getLogger(RconRPC.class);
private final Peer peer;
private final ConnectionConfiguration config;
private static final int POSITION_ZERO = 0;
private static final int MESSAGE_VERSION = 1;
/**
 * Registers this handler for the RCON command on the peer's dispatcher and
 * sets up a default connection configuration.
 *
 * @param peer the local peer this RPC belongs to
 */
public RconRPC(final Peer peer) {
    super(peer.peerBean(), peer.connectionBean());
    register(RPC.Commands.RCON.getNr());
    this.peer = peer;
    this.config = new DefaultConnectionConfiguration();
}
/**
 * This method is called from the {@link Dispatcher} and handles the reverse
 * connection at each step.
 *
 * REQUEST_1 = relay rcon forwarding. REQUEST_2 = open a TCP channel and
 * transmit {@link PeerConnection}. REQUEST_3 = use now open
 * {@link PeerConnection} to transmit original message (and eventually store
 * the {@link PeerConnection}). REQUEST_4 = store the {@link PeerConnection}
 * on the unreachable peer side (only called via startSetupRcon from the
 * PeerNAT)
 *
 * @param message the incoming RCON message
 * @param peerConnection the connection the message arrived on
 * @param sign whether the response should be signed
 * @param responder callback used to answer the message
 * @throws Exception propagated from the individual step handlers
 */
@Override
public void handleResponse(final Message message, final PeerConnection peerConnection, final boolean sign, final Responder responder)
        throws Exception {
    LOG.warn("received RconRPC message {}", message);
    // All four steps require the RCON command; check it once instead of
    // repeating the comparison in every branch.
    if (message.command() != RPC.Commands.RCON.getNr()) {
        throw new IllegalArgumentException("Message content is wrong");
    }
    switch (message.type()) {
    case REQUEST_1:
        // the message reached the relay peer
        LOG.warn("handle RconForward for message: {}", message);
        handleRconForward(message, responder);
        break;
    case REQUEST_2:
        // the message reached the unreachable peer
        LOG.warn("handle RconSetup for message: {}", message);
        handleRconSetup(message, responder);
        break;
    case REQUEST_3:
        // the message reached the requesting peer
        LOG.warn("handle RconAfterconnect for message: {}", message);
        handleRconAfterconnect(message, responder, peerConnection);
        break;
    case REQUEST_4:
        // only called if the PeerConnection should remain open
        LOG.warn("handle openConnection for message: {}", message);
        handleOpenConnection(message, responder, peerConnection);
        break;
    default:
        throw new IllegalArgumentException("Message content is wrong");
    }
}
/**
 * This methods is responsible for forwarding the rconSetupMessage from the
 * relay to the unreachable Peer. It extracts the already existing
 * {@link PeerConnection} of the unreachable peer and forwards then a new
 * message with {@link Type} .REQUEST_2.
 *
 * @param message the REQUEST_1 message received from the reachable peer
 * @param responder used to report success or failure back to the sender
 */
private void handleRconForward(final Message message, final Responder responder) {
    // the existing peerConnection to the
    // unreachable peer
    final PeerConnection peerConnection;
    // get the relayForwarderRPC via Dispatcher to retrieve the existing
    // peerConnection
    final RelayForwarderRPC relayForwarderRPC = extractRelayForwarderRPC(message);
    if (relayForwarderRPC != null) {
        peerConnection = relayForwarderRPC.peerConnection();
        final Message forwardMessage = createForwardMessage(message, peerConnection);
        // we don't want to use another sendDirect anymore since we don't
        // have to send data, thats why we use sendSingle(...)
        FutureResponse futureResponse = new FutureResponse(forwardMessage);
        futureResponse = RelayUtils.sendSingle(peerConnection, futureResponse, peer.peerBean(), peer.connectionBean(), config);
        // The send is asynchronous: the original responder is only answered
        // once the forward either succeeds or fails.
        futureResponse.addListener(new BaseFutureAdapter<FutureResponse>() {
            @Override
            public void operationComplete(final FutureResponse future) throws Exception {
                if (future.isSuccess()) {
                    // Indicate the reachable peer that the message was
                    // successfully forwarded
                    responder.response(createResponseMessage(message, Type.OK));
                } else {
                    handleFail(message, responder, "Exception while forwarding the rconMessage to the unreachable");
                }
            }
        });
    } else {
        handleFail(message, responder, "no relayForwarder Registered for peerId=" + message.recipient().peerId().toString());
    }
}
/**
 * Extracts a registered RelayForwarderRPC from the {@link Dispatcher}.
 * This RelayForwarder can then be used to extract the
 * {@link PeerConnection} to the unreachable Peer we want to contact.
 *
 * @param message the message whose recipient id selects the handler map
 * @return the registered RelayForwarderRPC, or null when none is found
 */
private RelayForwarderRPC extractRelayForwarderRPC(final Message message) {
    final Dispatcher dispatcher = peer.connectionBean().dispatcher();
    final Map<Integer, DispatchHandler> handlers =
            dispatcher.searchHandlerMap(message.recipient().peerId());
    // Return the first handler that is exactly a RelayForwarderRPC.
    for (final DispatchHandler handler : handlers.values()) {
        if (handler.getClass().equals(RelayForwarderRPC.class)) {
            return (RelayForwarderRPC) handler;
        }
    }
    return null;
}
/**
 * Creates the message which is sent from the relay peer to the unreachable
 * peer.
 *
 * @param message the original REQUEST_1 message from the reachable peer
 * @param peerConnection the open connection to the unreachable peer
 * @return the REQUEST_2 message to forward
 */
private Message createForwardMessage(final Message message, final PeerConnection peerConnection) {
    final Message forward = new Message();
    forward.type(Message.Type.REQUEST_2);
    forward.command(RPC.Commands.RCON.getNr());
    forward.sender(peer.peerAddress());
    forward.recipient(peerConnection.remotePeer());
    forward.version(MESSAGE_VERSION); // TODO jwa remove magic number
    // and find out why
    // we need the versionnumber
    // transmit the PeerAddress of the reachable peer inside a NeighborSet
    final NeighborSet reachablePeer = new NeighborSet(1);
    reachablePeer.add(message.sender());
    forward.neighborsSet(reachablePeer);
    // reuse the message id so the cached original message can be matched
    // up again later
    forward.messageId(message.messageId());
    // the relay <-> unreachable peer connection must stay open
    forward.keepAlive(true);
    // propagate the optional keep-open duration, if present
    if (message.longAt(POSITION_ZERO) != null) {
        forward.longValue(message.longAt(POSITION_ZERO));
    }
    return forward;
}
/**
 * This method handles the reverse connection setup on the unreachable peer
 * side. It extracts the {@link PeerAddress} from the reachable peer and
 * creates a new {@link PeerConnection} to it. Then it informs the reachable
 * peer that it is ready via a new message with {@link Type}.REQUEST3.
 *
 * @param message the REQUEST_2 message forwarded by the relay
 * @param responder used to report success or failure back to the relay
 * @throws TimeoutException
 */
private void handleRconSetup(final Message message, final Responder responder) throws TimeoutException {
    final PeerAddress originalSender;
    if (!message.neighborsSet(POSITION_ZERO).neighbors().isEmpty()) {
        // extract the PeerAddress from the reachable peer
        originalSender = (PeerAddress) message.neighborsSet(POSITION_ZERO).neighbors().toArray()[POSITION_ZERO];
        // create new PeerConnectin to the reachable peer
        final FuturePeerConnection fpc = peer.createPeerConnection(originalSender);
        fpc.addListener(new BaseFutureAdapter<FuturePeerConnection>() {
            @Override
            public void operationComplete(final FuturePeerConnection future) throws Exception {
                if (future.isSuccess()) {
                    PeerConnection peerConnection = future.peerConnection();
                    if (peerConnection != null) {
                        // Connection established: tell the reachable peer
                        // it can now use this channel (REQUEST_3).
                        final Message setupMessage = createSetupMessage(message, peerConnection);
                        FutureResponse futureResponse = new FutureResponse(setupMessage);
                        futureResponse = RelayUtils.sendSingle(peerConnection, futureResponse, peer.peerBean(), peer.connectionBean(),
                                config);
                        futureResponse.addListener(new BaseFutureAdapter<FutureResponse>() {
                            @Override
                            public void operationComplete(final FutureResponse future) throws Exception {
                                if (future.isSuccess()) {
                                    responder.response(createResponseMessage(message, Type.OK));
                                } else {
                                    handleFail(message, responder,
                                            "Exception while setting up the reverse connection from the unreachable to the original peer!");
                                }
                            }
                        });
                    } else {
                        handleFail(message, responder, "the peerConnection was null!");
                    }
                } else {
                    handleFail(message, responder, "no channel could be established");
                }
            }
        });
    } else {
        handleFail(message, responder, "the original sender was not transmittet in the neighborsSet!");
    }
}
/**
 * Creates the Message which is sent from the unreachable peer to the
 * reachable peer.
 *
 * @param message the REQUEST_2 message that triggered the setup
 * @param peerConnection the freshly opened connection to the reachable peer
 * @return the REQUEST_3 message announcing the open channel
 */
private Message createSetupMessage(final Message message, final PeerConnection peerConnection) {
    final Message setup = new Message();
    setup.type(Message.Type.REQUEST_3);
    setup.command(RPC.Commands.RCON.getNr());
    setup.sender(peer.peerAddress());
    setup.recipient(peerConnection.remotePeer());
    setup.version(MESSAGE_VERSION); // TODO remove magic number and
    // find out why
    // we need the versionnumber
    // reuse the message id so the reachable peer can match this setup
    // message to its cached original message
    setup.messageId(message.messageId());
    setup.keepAlive(true);
    // propagate the optional keep-open duration, if present
    if (message.longAt(POSITION_ZERO) != null) {
        setup.longValue(message.longAt(POSITION_ZERO));
    }
    return setup;
}
/**
 * This method takes the now established {@link PeerConnection} to the
 * unreachable peer and sends the original created message to it.
 *
 * @param message the REQUEST_3 message from the unreachable peer
 * @param responder used to report success or failure
 * @param peerConnection the now-open connection to the unreachable peer
 */
private void handleRconAfterconnect(final Message message, final Responder responder, final PeerConnection peerConnection) {
    // get the original message, cached under the shared message id
    final ConcurrentHashMap<Integer, Message> cachedMessages = peer.connectionBean().sender().cachedMessages();
    final Message cachedMessage = cachedMessages.remove(message.messageId());
    if (cachedMessage != null) {
        FutureResponse futureResponse = new FutureResponse(cachedMessage);
        futureResponse = RelayUtils.sendSingle(peerConnection, futureResponse, peer.peerBean(), peer.connectionBean(), config);
        futureResponse.addListener(new BaseFutureAdapter<FutureResponse>() {
            @Override
            public void operationComplete(final FutureResponse future) throws Exception {
                if (future.isSuccess()) {
                    LOG.debug("Original Message was sent successfully to unreachablePeer with PeerAddress{" + message.sender() + "}");
                    // check if the PeerConnection should be stored in the
                    // PeerBean
                    if (message.longAt(POSITION_ZERO) != null) {
                        storePeerConnection(message, peerConnection);
                    }
                    responseAndKeepAlive(message, responder);
                } else {
                    handleFail(message, responder, "The Original Message could not be sent!!!");
                }
            }
        });
    } else {
        handleFail(message, responder, "There was no original message for RconMessageId=" + message.messageId()
                + "! This should not happen!!!");
    }
}
/**
 * This method is only invoked if startSetupRcon() is called. It stores the
 * peerConnection on the unreachable peer side.
 *
 * @param message the REQUEST_4 message
 * @param responder used to acknowledge the store
 * @param peerConnection the connection to keep open
 */
private void handleOpenConnection(final Message message, final Responder responder, final PeerConnection peerConnection) {
    storePeerConnection(message, peerConnection);
    responseAndKeepAlive(message, responder);
}
/**
 * This method stores the now open {@link PeerConnection} of the unreachable
 * peer to the {@link PeerBean}.
 *
 * @param message the message carrying the keep-open duration at long slot 0
 * @param peerConnection the connection to store
 */
private void storePeerConnection(final Message message, final PeerConnection peerConnection) {
    // extract the amount of seconds which the connection should remain open
    // NOTE(review): longAt(POSITION_ZERO) may be null (callers only check
    // it on some paths); the unboxing below would then throw an NPE —
    // confirm every caller sets the long value.
    final long current = message.longAt(POSITION_ZERO);
    final Integer seconds = (int) current;
    // TODO jwa check concurrency!
    // insert the connection to a HashMap and store it on the PeerBean
    // final Map<PeerConnection, Integer> connection = Collections
    // .synchronizedMap(new HashMap<PeerConnection, Integer>());
    final HashMap<PeerConnection, Integer> connection = new HashMap<PeerConnection, Integer>();
    connection.put(peerConnection, seconds);
    // When the connection closes, drop it from the PeerBean again.
    peerConnection.closeFuture().addListener(new BaseFutureAdapter<FutureDone<Void>>() {
        @Override
        public void operationComplete(final FutureDone<Void> future) throws Exception {
            // remove the open PeerConnection to the other Peer from
            // openPeerConnections in the PeerBean
            LOG.debug("Permanent PeerConnection to peer=" + message.sender() + " has been closed.");
            peer.peerBean().openPeerConnections().remove(message.sender().peerId());
        }
    });
    // put the now open PeerConnection into the openPeerConnections-Map in
    // the PeerBean
    final ConcurrentHashMap<Number160, HashMap<PeerConnection, Integer>> openPeerConnections = peer.peerBean().openPeerConnections();
    openPeerConnections.put(message.sender().peerId(), connection);
}
/**
 * Answers the given message with an OK response, making sure the keepAlive
 * flag is set first. Without keepAlive the PeerConnection to the other
 * peer would close itself (even if it is a relay).
 *
 * @param message the message to acknowledge
 * @param responder used to send the OK response
 */
private void responseAndKeepAlive(final Message message, final Responder responder) {
    // Both branches of the original did the same thing after (maybe)
    // setting the flag, so set-if-needed and respond once.
    if (!message.isKeepAlive()) {
        message.keepAlive(true);
    }
    responder.response(createResponseMessage(message, Type.OK));
}
/**
 * This method is called if something went wrong while the reverse
 * connection setup. It responds then with a {@link Type}.EXCEPTION message.
 *
 * @param message the message being answered
 * @param responder used to send the EXCEPTION response
 * @param failReason human-readable reason, also written to the error log
 */
private void handleFail(final Message message, final Responder responder, final String failReason) {
    LOG.error(failReason);
    responder.response(createResponseMessage(message, Type.EXCEPTION));
}
}
package org.mozartoz.truffle.runtime;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import org.mozartoz.truffle.Options;
import com.oracle.truffle.api.CompilerDirectives.TruffleBoundary;
import com.oracle.truffle.api.nodes.Node;
import com.oracle.truffle.coro.Coroutine;
import com.oracle.truffle.coro.CoroutineLocal;
public class OzThread implements Runnable {
private static final CoroutineLocal<OzThread> CURRENT_OZ_THREAD = new CoroutineLocal<>();
public static final OzThread MAIN_THREAD = new OzThread();
public static final Map<OzThread, OzBacktrace> BACKTRACES = Options.STACKTRACE_ON_INTERRUPT ? new ConcurrentHashMap<>() : null;
private static long threadsCreated = 1L;
private static long threadsRunnable = 1L;
public static long getNumberOfThreadsRunnable() {
return threadsRunnable;
}
public static long getNumberOfThreadsCreated() {
return threadsCreated;
}
public static OzThread getCurrent() {
return CURRENT_OZ_THREAD.get();
}
private final Coroutine coroutine;
private final OzProc proc;
private String status = "runnable";
private OzThread() {
coroutine = (Coroutine) Coroutine.current();
proc = null;
setInitialOzThread();
}
public OzThread(OzProc proc) {
this.proc = proc;
this.coroutine = new Coroutine(this, 1024 * 1024); // 256 seems OK if we parse outside the coro
threadsCreated++;
}
private void setInitialOzThread() {
CURRENT_OZ_THREAD.set(this);
}
public OzProc getProc() {
return proc;
}
public Coroutine getCoroutine() {
return coroutine;
}
public String getStatus() {
return status;
}
@Override
public void run() {
setInitialOzThread();
threadsRunnable++;
try {
proc.rootCall("Thread.create");
} finally {
threadsRunnable
status = "terminated";
}
}
@TruffleBoundary
public void yield(Node currentNode) {
status = "blocked";
if (Options.STACKTRACE_ON_INTERRUPT) {
BACKTRACES.put(this, OzBacktrace.capture(currentNode));
}
Coroutine.yield();
status = "runnable";
}
public void suspend(Node currentNode) {
threadsRunnable
yield(currentNode);
threadsRunnable++;
}
} |
/**
* Created at: 12:17:19 AM Jul 18, 2013
*
* @author TanNhat Project: wcrawler
*/
package wcrawler.core;
import com.google.common.util.concurrent.RateLimiter;
import java.net.URL;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentLinkedQueue;
import org.apache.log4j.Logger;
import wcrawler._interface.IDomainRateLimiter;
public class DomainRateLimiter implements IDomainRateLimiter {
private static Logger _logger = Logger.getLogger(DomainRateLimiter.class);
private long defaultMinCrawlDelayInMilliSecs;
private ConcurrentHashMap<String, RateLimiter> _rateLimiterLookup;
public DomainRateLimiter(long defaultMinCrawlDelayInMilliSecs) {
if(defaultMinCrawlDelayInMilliSecs < 0){
throw new ArithmeticException("defaultMinCrawlDelayInMilliSecs");
}
this.defaultMinCrawlDelayInMilliSecs = defaultMinCrawlDelayInMilliSecs;
_rateLimiterLookup = new ConcurrentHashMap<String, RateLimiter>();
}
// Implement rate limit, by getting the "ratelimiter" object from _rateLimiterLookup
@Override
public void rateLimit(URL url) {
throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
}
// Add a domain and its crawl-delay value into a concurrenthashmap
@Override
public void addDomain(URL url, long minCrawlDelayInMilliSecs) {
// Check valid of input first
if(url == null)
throw new NullPointerException("url");
if(minCrawlDelayInMilliSecs < 1)
throw new ArithmeticException("minCrawlDelayInMilliSecs");
// Get the greater value of new crawl-delay value or default crawl-delay value
long millThatIsGreater = minCrawlDelayInMilliSecs > defaultMinCrawlDelayInMilliSecs ? minCrawlDelayInMilliSecs : defaultMinCrawlDelayInMilliSecs;
// Create rateLimiter with that value
RateLimiter rateLimiter = RateLimiter.create(millThatIsGreater);
// Add to _rateLimiterLookup. Each url will has its own rateLimiter
_rateLimiterLookup.put(url.getAuthority(), rateLimiter);
_logger.debug("Add Domain \""+url.getAuthority()+"\" and RateLimiter with value "+millThatIsGreater);
}
} |
package functionaltests;
import java.io.File;
import java.io.IOException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import org.objectweb.proactive.api.PAActiveObject;
import org.objectweb.proactive.api.PAFuture;
import org.objectweb.proactive.core.ProActiveException;
import org.objectweb.proactive.core.ProActiveTimeoutException;
import org.objectweb.proactive.core.config.CentralPAPropertyRepository;
import org.objectweb.proactive.core.node.Node;
import org.objectweb.proactive.core.node.NodeException;
import org.objectweb.proactive.core.node.NodeFactory;
import org.objectweb.proactive.core.node.StartNode;
import org.objectweb.proactive.core.process.JVMProcess;
import org.objectweb.proactive.core.process.JVMProcessImpl;
import org.objectweb.proactive.core.util.ProActiveInet;
import org.objectweb.proactive.utils.OperatingSystem;
import org.ow2.proactive.authentication.crypto.CredData;
import org.ow2.proactive.authentication.crypto.Credentials;
import org.ow2.proactive.resourcemanager.RMFactory;
import org.ow2.proactive.resourcemanager.authentication.RMAuthentication;
import org.ow2.proactive.resourcemanager.common.event.RMEventType;
import org.ow2.proactive.resourcemanager.common.event.RMNodeEvent;
import org.ow2.proactive.resourcemanager.core.properties.PAResourceManagerProperties;
import org.ow2.proactive.resourcemanager.frontend.RMConnection;
import org.ow2.proactive.resourcemanager.frontend.ResourceManager;
import org.ow2.proactive.resourcemanager.nodesource.NodeSource;
import org.ow2.proactive.resourcemanager.nodesource.infrastructure.LocalInfrastructure;
import org.ow2.proactive.resourcemanager.nodesource.policy.StaticPolicy;
import org.ow2.proactive.resourcemanager.utils.RMNodeStarter;
import org.ow2.proactive.utils.FileToBytesConverter;
import org.ow2.tests.ProActiveSetup;
import functionaltests.common.CommonTUtils;
import functionaltests.common.InputStreamReaderThread;
import functionaltests.monitor.RMMonitorEventReceiver;
import functionaltests.monitor.RMMonitorsHandler;
import org.ow2.tests.ProcessCleaner;
/**
*
* Static helpers for Resource Manager functional tests.
* It provides waiters methods that check correct event dispatching.
*
* @author ProActive team
*
*/
public class RMTHelper {
/**
* Number of nodes deployed with default deployment descriptor
*/
private static final int defaultNodesNumber = 2;
// default RMI port
// do not use the one from proactive config to be able to
// keep the RM running after the test with rmi registry is killed
public static final int PA_RMI_PORT = 1199;
/**
* Timeout for local infrastructure
*/
public static final int defaultNodesTimeout = 60 * 1000; //60s
public static final URL functionalTestRMProperties = RMTHelper.class
.getResource("/functionaltests/config/functionalTRMProperties.ini");
protected RMMonitorsHandler monitorsHandler;
protected RMMonitorEventReceiver eventReceiver;
protected ResourceManager resourceManager;
protected RMAuthentication auth;
private Process rmProcess;
final protected static ProActiveSetup setup = new ProActiveSetup();
/**
* Default user name for RM's connection
*/
public static String defaultUserName = "test_executor";
/**
* Default password for RM's connection
*/
public static String defaultUserPassword = "pwd";
/**
* Currently connected user name for RM's connection
*/
public static String connectedUserName = null;
/**
* Currently connected password for RM's connection
*/
public static String connectedUserPassword = null;
public static Credentials connectedUserCreds = null;
private static RMTHelper defaultInstance = new RMTHelper();
public static RMTHelper getDefaultInstance() {
return defaultInstance;
}
/**
* Log a String for tests.
*
* @param s String to log
*/
public static void log(String s) {
System.out.println("
}
/**
* Creates a Local node source
* @throws Exception
*/
public void createNodeSource() throws Exception {
createNodeSource(this.getClass().getSimpleName());
}
/**
* Creates a Local node source with specified name
* @throws Exception
* @return expected number of nodes
*/
public int createNodeSource(String name) throws Exception {
createNodeSource(name, RMTHelper.defaultNodesNumber);
return RMTHelper.defaultNodesNumber;
}
/**
* Creates a Local node source with specified name
*/
public void createNodeSource(String name, int nodeNumber) throws Exception {
RMFactory.setOsJavaProperty();
ResourceManager rm = getResourceManager();
System.out.println("Creating a node source " + name);
//first emtpy im parameter is default rm url
byte[] creds = FileToBytesConverter.convertFileToByteArray(new File(PAResourceManagerProperties
.getAbsolutePath(PAResourceManagerProperties.RM_CREDS.getValueAsString())));
rm.createNodeSource(name, LocalInfrastructure.class.getName(), new Object[] {
creds,
nodeNumber,
RMTHelper.defaultNodesTimeout,
setup.getJvmParameters() + " " + CentralPAPropertyRepository.PA_RMI_PORT.getCmdLine() +
RMTHelper.PA_RMI_PORT }, StaticPolicy.class.getName(), null);
rm.setNodeSourcePingFrequency(5000, name);
waitForNodeSourceCreation(name, nodeNumber);
}
/** Wait for the node source to be created when the node source is empty */
public void waitForNodeSourceCreation(String name) {
waitForNodeSourceCreation(name, 0);
}
/** Wait for the node source to be created and the nodes to be connected */
public void waitForNodeSourceCreation(String name, int nodeNumber) {
waitForNodeSourceEvent(RMEventType.NODESOURCE_CREATED, name);
for (int i = 0; i < nodeNumber; i++) {
waitForAnyNodeEvent(RMEventType.NODE_ADDED);
waitForAnyNodeEvent(RMEventType.NODE_REMOVED);
waitForAnyNodeEvent(RMEventType.NODE_ADDED);
waitForAnyNodeEvent(RMEventType.NODE_STATE_CHANGED);
}
}
/**
* Create a ProActive Node in a new JVM on the local host
* This method can be used to test adding nodes mechanism
* with already deploy ProActive nodes.
* @param nodeName node's name to create
* @return created node object
* @throws IOException if the external JVM cannot be created
* @throws NodeException if lookup of the new node fails.
*/
public TNode createNode(String nodeName) throws IOException, NodeException {
return createNode(nodeName, null);
}
public TNode createNode(String nodeName, Map<String, String> vmParameters) throws IOException,
NodeException {
return createNode(nodeName, null, vmParameters, null);
}
public List<TNode> createNodes(final String nodeName, int number) throws IOException, NodeException,
ExecutionException, InterruptedException {
ExecutorService executorService = Executors.newFixedThreadPool(number);
ArrayList<Future<TNode>> futureNodes = new ArrayList<Future<TNode>>(number);
for (int i = 0; i < number; i++) {
final int index = i;
futureNodes.add(executorService.submit(new Callable<TNode>() {
@Override
public TNode call() {
try {
return createNode(nodeName + index, null);
} catch (Exception e) {
e.printStackTrace();
}
return null;
}
}));
}
ArrayList nodes = new ArrayList(number);
for (int i = 0; i < number; i++) {
nodes.add(futureNodes.get(i).get());
}
return nodes;
}
// TODO REMOVE THIS METHOD
public void createNodeSource(int rmiPort, int nodesNumber) throws Exception {
createNodeSource(rmiPort, nodesNumber, null);
}
// TODO REMOVE THIS METHOD
public void createNodeSource(int rmiPort, int nodesNumber, List<String> vmOptions) throws Exception {
Map<String, String> map = new HashMap<String, String>();
map.put(CentralPAPropertyRepository.PA_RMI_PORT.getName(), String.valueOf(rmiPort));
map
.put(CentralPAPropertyRepository.PA_HOME.getName(), CentralPAPropertyRepository.PA_HOME
.getValue());
for (int i = 0; i < nodesNumber; i++) {
String nodeName = "node-" + i;
String nodeUrl = "rmi://localhost:" + rmiPort + "/" + nodeName;
createNode(nodeName, nodeUrl, map, vmOptions);
getResourceManager().addNode(nodeUrl);
}
waitForNodeSourceEvent(RMEventType.NODESOURCE_CREATED, NodeSource.DEFAULT);
for (int i = 0; i < nodesNumber; i++) {
waitForAnyNodeEvent(RMEventType.NODE_STATE_CHANGED);
}
}
/**
* Create a ProActive Node in a new JVM on the local host
* with specific java parameters.
* This method can be used to test adding nodes mechanism
* with already deploy ProActive nodes.
* @param nodeName node's name to create
* @param vmParameters an HashMap containing key and value String
* of type :-Dkey=value
* @return created node object
* @throws IOException if the external JVM cannot be created
* @throws NodeException if lookup of the new node fails.
*/
public TNode createNode(String nodeName, String expectedUrl, Map<String, String> vmParameters,
List<String> vmOptions) throws IOException, NodeException {
JVMProcessImpl nodeProcess = createJvmProcess(StartNode.class.getName(), Arrays.asList(nodeName),
vmParameters, vmOptions);
return createNode(nodeName, expectedUrl, nodeProcess);
}
public static TNode createRMNodeStarterNode(String nodeName) throws IOException, NodeException {
JVMProcessImpl nodeProcess = createJvmProcess(RMNodeStarter.class.getName(), Arrays.asList("-n",
nodeName, "-r", getLocalUrl(), "-Dproactive.net.nolocal=false"), null, null);
return createNode(nodeName, null, nodeProcess);
}
public static TNode createNode(String nodeName, String expectedUrl, JVMProcess nodeProcess)
throws IOException, NodeException {
if (expectedUrl == null) {
expectedUrl = "rmi://" + ProActiveInet.getInstance().getHostname() + ":" + PA_RMI_PORT + "/" +
nodeName;
}
try {
Node newNode = null;
final long NODE_START_TIMEOUT_IN_MS = 60000;
long startTimeStamp = System.currentTimeMillis();
NodeException toThrow = null;
while ((System.currentTimeMillis() - startTimeStamp) < NODE_START_TIMEOUT_IN_MS) {
try {
newNode = NodeFactory.getNode(expectedUrl);
} catch (NodeException e) {
toThrow = e;
//nothing, wait another loop
}
if (newNode != null) {
return new TNode(nodeProcess, newNode);
} else {
Thread.sleep(100);
}
}
throw toThrow == null ? new NodeException("unable to create the node " + nodeName) : toThrow;
} catch (InterruptedException e) {
e.printStackTrace();
return null;
}
}
public static JVMProcessImpl createJvmProcess(String className, List<String> parameters,
Map<String, String> vmParameters, List<String> vmOptions) throws IOException {
JVMProcessImpl nodeProcess = new JVMProcessImpl(
new org.objectweb.proactive.core.process.AbstractExternalProcess.StandardOutputMessageLogger());
nodeProcess.setClassname(className);
ArrayList<String> jvmParameters = new ArrayList<String>();
if (vmParameters == null) {
vmParameters = new HashMap<String, String>();
}
if (!vmParameters.containsKey(CentralPAPropertyRepository.PA_RMI_PORT.getName())) {
vmParameters.put(CentralPAPropertyRepository.PA_RMI_PORT.getName(), String.valueOf(PA_RMI_PORT));
}
if (!vmParameters.containsKey(CentralPAPropertyRepository.PA_HOME.getName())) {
vmParameters.put(CentralPAPropertyRepository.PA_HOME.getName(),
CentralPAPropertyRepository.PA_HOME.getValue());
}
if (!vmParameters.containsKey(PAResourceManagerProperties.RM_HOME.getKey())) {
vmParameters.put(PAResourceManagerProperties.RM_HOME.getKey(),
PAResourceManagerProperties.RM_HOME.getValueAsString());
}
for (Entry<String, String> entry : vmParameters.entrySet()) {
if (!entry.getKey().equals("") && !entry.getValue().equals("")) {
jvmParameters.add("-D" + entry.getKey() + "=" + entry.getValue());
}
}
if (vmOptions != null) {
jvmParameters.addAll(vmOptions);
}
jvmParameters.addAll(setup.getJvmParametersAsList());
nodeProcess.setJvmOptions(jvmParameters);
nodeProcess.setParameters(parameters);
nodeProcess.startProcess();
return nodeProcess;
}
/**
* Start the RM using a forked JVM
*
* @param configurationFile the RM's configuration file to use (default is functionalTSchedulerProperties.ini)
* null to use the default one.
* @throws Exception if an error occurs.
*/
public String startRM(String configurationFile, int rmiPort, String... jvmArgs) throws Exception {
if (configurationFile == null) {
configurationFile = new File(functionalTestRMProperties.toURI()).getAbsolutePath();
}
PAResourceManagerProperties.updateProperties(configurationFile);
List<String> commandLine = new ArrayList<String>();
commandLine.add(System.getProperty("java.home") + File.separator + "bin" + File.separator + "java");
commandLine.add("-Djava.security.manager");
String proactiveHome = CentralPAPropertyRepository.PA_HOME.getValue();
if (!CentralPAPropertyRepository.PA_HOME.isSet()) {
proactiveHome = PAResourceManagerProperties.RM_HOME.getValueAsString();
}
commandLine.add(CentralPAPropertyRepository.PA_HOME.getCmdLine() + proactiveHome);
String securityPolicy = CentralPAPropertyRepository.JAVA_SECURITY_POLICY.getValue();
if (!CentralPAPropertyRepository.JAVA_SECURITY_POLICY.isSet()) {
securityPolicy = PAResourceManagerProperties.RM_HOME.getValueAsString() +
"/config/security.java.policy-server";
}
commandLine.add(CentralPAPropertyRepository.JAVA_SECURITY_POLICY.getCmdLine() + securityPolicy);
String log4jConfiguration = CentralPAPropertyRepository.LOG4J.getValue();
if (!CentralPAPropertyRepository.LOG4J.isSet()) {
log4jConfiguration = RMTHelper.class.getResource("/log4j-junit").toString();
}
commandLine.add(CentralPAPropertyRepository.LOG4J.getCmdLine() + log4jConfiguration);
commandLine.add(PAResourceManagerProperties.RM_HOME.getCmdLine() +
PAResourceManagerProperties.RM_HOME.getValueAsString());
commandLine.add(CentralPAPropertyRepository.PA_RUNTIME_PING.getCmdLine() + false);
commandLine.add("-cp");
commandLine.add(testClasspath());
commandLine.add(CentralPAPropertyRepository.PA_TEST.getCmdLine() + "true");
commandLine.add("-Djava.awt.headless=true"); // For Mac builds
commandLine.add(CentralPAPropertyRepository.PA_RMI_PORT.getCmdLine() + rmiPort);
Collections.addAll(commandLine, jvmArgs);
commandLine.add(RMTStarter.class.getName());
commandLine.add(configurationFile);
System.out.println("Starting RM process: " + commandLine);
ProcessBuilder processBuilder = new ProcessBuilder(commandLine);
processBuilder.redirectErrorStream(true);
rmProcess = processBuilder.start();
InputStreamReaderThread outputReader = new InputStreamReaderThread(rmProcess.getInputStream(),
"[RM VM output]: ");
outputReader.start();
String url = getLocalUrl(rmiPort);
System.out.println("Waiting for the RM using URL: " + url);
auth = RMConnection.waitAndJoin(url);
return url;
}
public static String testClasspath() {
String home = PAResourceManagerProperties.RM_HOME.getValueAsString();
String classpathToLibFolderWithWildcard = home + File.separator + "dist" + File.separator + "lib" +
File.separator + "*";
if (OperatingSystem.getOperatingSystem().equals(OperatingSystem.windows)) {
// required by windows otherwise wildcard is expanded
classpathToLibFolderWithWildcard = "\"" + classpathToLibFolderWithWildcard + "\"";
}
return classpathToLibFolderWithWildcard;
}
/**
* Returns the alive Nodes accessible by the RM
* @return list of ProActive Nodes
*/
public List<Node> listAliveNodes() throws Exception {
ArrayList<Node> nodes = new ArrayList<Node>();
Set<String> urls = getResourceManager().listAliveNodeUrls();
for (String url : urls) {
nodes.add(NodeFactory.getNode(url));
}
return nodes;
}
/**
* Returns the alive Nodes accessible by the RM in the given node sources
* @param nodeSourceNames
* @return list of ProActive Nodes
*/
public List<Node> listAliveNodes(Set<String> nodeSourceNames) throws Exception {
ArrayList<Node> nodes = new ArrayList<Node>();
Set<String> urls = getResourceManager().listAliveNodeUrls(nodeSourceNames);
for (String url : urls) {
nodes.add(NodeFactory.getNode(url));
}
return nodes;
}
/**
* Returns the list of alive Nodes
* @return list of ProActive Nodes urls
*/
public Set<String> listAliveNodesUrls() throws Exception {
return getResourceManager().listAliveNodeUrls();
}
/**
* Returns the list of alive Nodes in the given nodeSources
* @param nodeSourceNames
* @return list of ProActive Nodes urls
*/
public Set<String> listAliveNodesUrls(Set<String> nodeSourceNames) throws Exception {
return getResourceManager().listAliveNodeUrls(nodeSourceNames);
}
/**
* Stop the Resource Manager if exists.
* @throws Exception
* @throws ProActiveException
*/
public void killRM() throws Exception {
if (rmProcess != null) {
rmProcess.destroy();
rmProcess.waitFor();
rmProcess = null;
// killing all rm nodes
ProcessCleaner cleaner = new ProcessCleaner(".*RMNodeStarter.*");
cleaner.killAliveProcesses();
// sometimes RM_NODE object isn't removed from the RMI registry after JVM with RM is killed (SCHEDULING-1498)
CommonTUtils.cleanupRMActiveObjectRegistry();
}
reset();
}
/**
* Resets the RMTHelper
*/
public void reset() throws Exception {
auth = null;
resourceManager = null;
eventReceiver = null;
}
/**
* Wait for an event regarding Scheduler state : started, resumed, stopped...
* If a corresponding event has been already thrown by scheduler, returns immediately,
* otherwise wait for reception of the corresponding event.
* @param event awaited event.
*/
public void waitForRMStateEvent(RMEventType event) {
try {
waitForRMStateEvent(event, 0);
} catch (ProActiveTimeoutException e) {
//unreachable block, 0 means infinite, no timeout
//log sthing ?
}
}
/**
* Wait for an event regarding RM state : started, resumed, stopped...
* If a corresponding event has been already thrown by scheduler, returns immediately,
* otherwise wait for reception of the corresponding event.
* @param eventType awaited event.
* @param timeout in milliseconds
* @throws ProActiveTimeoutException if timeout is reached
*/
public void waitForRMStateEvent(RMEventType eventType, long timeout) throws ProActiveTimeoutException {
getMonitorsHandler().waitForRMStateEvent(eventType, timeout);
}
/**
* Wait for an event regarding node sources: created, removed....
* If a corresponding event has been already thrown by RM, returns immediately,
* otherwise wait for reception of the corresponding event.
* @param nodeSourceEvent awaited event.
* @param nodeSourceName corresponding node source name for which an event is awaited.
*/
public void waitForNodeSourceEvent(RMEventType nodeSourceEvent, String nodeSourceName) {
try {
waitForNodeSourceEvent(nodeSourceEvent, nodeSourceName, 0);
} catch (ProActiveTimeoutException e) {
//unreachable block, 0 means infinite, no timeout
//log sthing ?
}
}
/**
* Wait for an event regarding node sources: created, removed....
* If a corresponding event has been already thrown by RM, returns immediately,
* otherwise wait for reception of the corresponding event.
* @param eventType awaited event.
* @param nodeSourceName corresponding node source name for which an event is awaited.
* @param timeout in milliseconds
* @throws ProActiveTimeoutException if timeout is reached
*/
public void waitForNodeSourceEvent(RMEventType eventType, String nodeSourceName, long timeout)
throws ProActiveTimeoutException {
getMonitorsHandler().waitForNodesourceEvent(eventType, nodeSourceName, timeout);
}
/**
* Wait for an event on a specific node : created, removed....
* If a corresponding event has been already thrown by RM, returns immediately,
* otherwise wait for reception of the corresponding event.
* @param nodeEvent awaited event.
* @param nodeUrl Url's of the node for which a new state is awaited.
* @return RMNodeEvent object received by event receiver.
*/
public RMNodeEvent waitForNodeEvent(RMEventType nodeEvent, String nodeUrl) {
try {
return waitForNodeEvent(nodeEvent, nodeUrl, 0);
} catch (ProActiveTimeoutException e) {
//unreachable block, 0 means infinite, no timeout
//log string ?
return null;
}
}
/**
* Wait for an event on a specific node : created, removed....
* If a corresponding event has been already thrown by RM, returns immediately,
* otherwise wait for reception of the corresponding event.
* @param eventType awaited event.
* @param nodeUrl Url's of the node for which a new state is awaited
* @param timeout in milliseconds
* @return RMNodeEvent object received by event receiver.
* @throws ProActiveTimeoutException if timeout is reached
*/
public RMNodeEvent waitForNodeEvent(RMEventType eventType, String nodeUrl, long timeout)
throws ProActiveTimeoutException {
return getMonitorsHandler().waitForNodeEvent(eventType, nodeUrl, timeout);
}
/**
* Wait for an event on any node: added, removed....
* If a corresponding event has been already thrown by RM, returns immediately,
* otherwise wait for reception of the corresponding event.
* @param eventType awaited event.
* @return RMNodeEvent object received by event receiver.
*/
public RMNodeEvent waitForAnyNodeEvent(RMEventType eventType) {
try {
return waitForAnyNodeEvent(eventType, 0);
} catch (ProActiveTimeoutException e) {
//unreachable block, 0 means infinite, no timeout
//log sthing ?
return null;
}
}
/**
* Kills the node with specified url
* @param url of the node
* @throws NodeException if node cannot be looked up
*/
public void killNode(String url) throws NodeException {
Node node = NodeFactory.getNode(url);
try {
node.getProActiveRuntime().killRT(false);
} catch (Exception ignored) {
}
}
/**
* Wait for an event on any node: added, removed....
* If a corresponding event has been already thrown by RM, returns immediately,
* otherwise wait for reception of the corresponding event.
* @param eventType awaited event.
* @param timeout in milliseconds
* @return RMNodeEvent object received by event receiver.
* @throws ProActiveTimeoutException if timeout is reached
*/
public RMNodeEvent waitForAnyNodeEvent(RMEventType eventType, long timeout)
throws ProActiveTimeoutException {
return getMonitorsHandler().waitForAnyNodeEvent(eventType, timeout);
}
//private methods
private void initEventReceiver() throws Exception {
RMMonitorsHandler mHandler = getMonitorsHandler();
if (eventReceiver == null) {
/** create event receiver then turnActive to avoid deepCopy of MonitorsHandler object
* (shared instance between event receiver and static helpers).
*/
System.out.println("Initializing new event receiver");
RMMonitorEventReceiver passiveEventReceiver = new RMMonitorEventReceiver(mHandler);
eventReceiver = (RMMonitorEventReceiver) PAActiveObject.turnActive(passiveEventReceiver);
PAFuture.waitFor(resourceManager.getMonitoring().addRMEventListener(eventReceiver));
}
}
/**
* Gets the connected ResourceManager interface.
*/
public ResourceManager getResourceManager() throws Exception {
return getResourceManager(null, defaultUserName, defaultUserPassword);
}
/**
* Idem than getResourceManager but allow to specify a propertyFile
* @return the resource manager
* @throws Exception
*/
public ResourceManager getResourceManager(String propertyFile, String user, String pass) throws Exception {
if (user == null)
user = defaultUserName;
if (pass == null)
pass = defaultUserPassword;
if (resourceManager == null || !user.equals(connectedUserName)) {
if (resourceManager != null) {
System.out.println("Disconnecting user " + connectedUserName + " from the resource manager");
try {
resourceManager.getMonitoring().removeRMEventListener();
resourceManager.disconnect().getBooleanValue();
eventReceiver = null;
resourceManager = null;
} catch (RuntimeException ex) {
ex.printStackTrace();
}
}
if (auth == null) {
try {
// trying to connect to the existing RM first
auth = RMConnection.waitAndJoin(getLocalUrl(PA_RMI_PORT), 1);
System.out.println("Connected to the RM on " + getLocalUrl(PA_RMI_PORT));
} catch (Exception e) {
// creating a new RM and default node source
startRM(propertyFile, PA_RMI_PORT);
}
}
authentificate(user, pass);
initEventReceiver();
System.out.println("RMTHelper is connected");
}
return resourceManager;
}
public static String getLocalUrl(int rmiPort) {
return "rmi://localhost:" + rmiPort + "/";
}
public static String getLocalUrl() {
return getLocalUrl(PA_RMI_PORT);
}
private void authentificate(String user, String pass) throws Exception {
connectedUserName = user;
connectedUserPassword = pass;
connectedUserCreds = Credentials.createCredentials(new CredData(CredData.parseLogin(user), CredData
.parseDomain(connectedUserName), pass), auth.getPublicKey());
System.out.println("Authentificating as user " + user);
resourceManager = auth.login(connectedUserCreds);
}
public RMMonitorsHandler getMonitorsHandler() {
if (monitorsHandler == null) {
monitorsHandler = new RMMonitorsHandler();
}
return monitorsHandler;
}
public RMMonitorEventReceiver getEventReceiver() {
return eventReceiver;
}
public RMAuthentication getRMAuth() throws Exception {
if (auth == null) {
getResourceManager();
}
return auth;
}
} |
package eu.dzhw.fdz.metadatamanagement.datasetmanagement.service;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.regex.Pattern;
import org.javers.core.Javers;
import org.springframework.data.domain.Sort;
import org.springframework.data.domain.Sort.Order;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.gridfs.GridFsCriteria;
import org.springframework.data.mongodb.gridfs.GridFsOperations;
import org.springframework.stereotype.Service;
import org.springframework.web.multipart.MultipartFile;
import com.mongodb.client.gridfs.model.GridFSFile;
import eu.dzhw.fdz.metadatamanagement.common.domain.I18nString;
import eu.dzhw.fdz.metadatamanagement.common.domain.ShadowCopyCreateNotAllowedException;
import eu.dzhw.fdz.metadatamanagement.common.domain.ShadowCopyDeleteNotAllowedException;
import eu.dzhw.fdz.metadatamanagement.common.service.AttachmentMetadataHelper;
import eu.dzhw.fdz.metadatamanagement.datasetmanagement.domain.DataSet;
import eu.dzhw.fdz.metadatamanagement.datasetmanagement.domain.DataSetAttachmentMetadata;
import eu.dzhw.fdz.metadatamanagement.datasetmanagement.repository.DataSetRepository;
import eu.dzhw.fdz.metadatamanagement.datasetmanagement.service.helper.DataSetAttachmentFilenameBuilder;
import eu.dzhw.fdz.metadatamanagement.projectmanagement.domain.DataAcquisitionProject;
import eu.dzhw.fdz.metadatamanagement.usermanagement.security.SecurityUtils;
import lombok.RequiredArgsConstructor;
@Service
@RequiredArgsConstructor
public class DataSetAttachmentService {
private final GridFsOperations operations;
private final MongoTemplate mongoTemplate;
private final Javers javers;
private final DataSetRepository dataSetRepository;
private final AttachmentMetadataHelper<DataSetAttachmentMetadata> attachmentMetadataHelper;
/**
* Save the attachment for a data set.
*
* @param metadata The metadata of the attachment.
* @return The GridFs filename.
* @throws IOException thrown when the input stream is not closable
*/
public String createDataSetAttachment(MultipartFile multipartFile,
DataSetAttachmentMetadata metadata) throws IOException {
if (metadata.isShadow()) {
throw new ShadowCopyCreateNotAllowedException();
}
String currentUser = SecurityUtils.getCurrentUserLogin();
attachmentMetadataHelper.initAttachmentMetadata(metadata, currentUser);
metadata.generateId();
metadata.setMasterId(metadata.getId());
String filename = DataSetAttachmentFilenameBuilder.buildFileName(metadata);
attachmentMetadataHelper.writeAttachmentMetadata(multipartFile, filename, metadata,
currentUser);
return filename;
}
/**
* Update the metadata of the attachment.
*
* @param metadata The new metadata.
*/
public void updateAttachmentMetadata(DataSetAttachmentMetadata metadata) {
String filePath = DataSetAttachmentFilenameBuilder.buildFileName(metadata.getDataSetId(),
metadata.getFileName());
attachmentMetadataHelper.updateAttachmentMetadata(metadata, filePath);
}
/**
* Delete all attachments of the given dataSet.
*
* @param dataSetId the id of the dataSet.
*/
public void deleteAllByDataSetId(String dataSetId) {
String currentUser = SecurityUtils.getCurrentUserLogin();
Query query = new Query(GridFsCriteria.whereFilename().regex(
"^" + Pattern.quote(DataSetAttachmentFilenameBuilder.buildFileNamePrefix(dataSetId))));
Iterable<GridFSFile> files = this.operations.find(query);
files.forEach(file -> {
DataSetAttachmentMetadata metadata =
mongoTemplate.getConverter().read(DataSetAttachmentMetadata.class, file.getMetadata());
if (metadata.isShadow()) {
throw new ShadowCopyDeleteNotAllowedException();
}
javers.commitShallowDelete(currentUser, metadata);
});
this.operations.delete(query);
}
/**
* Load all metadata objects from gridfs (ordered by indexInDataSet).
*
* @param dataSetId The id of the dataSet.
* @return A list of metadata.
*/
public List<DataSetAttachmentMetadata> findAllByDataSet(String dataSetId) {
Query query = new Query(GridFsCriteria.whereFilename().regex(
"^" + Pattern.quote(DataSetAttachmentFilenameBuilder.buildFileNamePrefix(dataSetId))));
query.with(Sort.by(Sort.Direction.ASC, "metadata.indexInDataSet"));
Iterable<GridFSFile> files = this.operations.find(query);
List<DataSetAttachmentMetadata> result = new ArrayList<>();
files.forEach(gridfsFile -> {
result.add(mongoTemplate.getConverter().read(DataSetAttachmentMetadata.class,
gridfsFile.getMetadata()));
});
return result;
}
/**
 * Load all attachment metadata objects of a project from GridFS (ordered by
 * dataSetNumber and indexInDataSet) and recompute a zero-based indexInDataSet
 * that restarts at 0 for every dataSet.
 *
 * @param dataAcquisitionProjectId The id of the {@link DataAcquisitionProject}.
 * @return A list of metadata.
 */
public List<DataSetAttachmentMetadata> findAllByProject(String dataAcquisitionProjectId) {
  Query query = new Query(GridFsCriteria.whereFilename()
      .regex(DataSetAttachmentFilenameBuilder.ALL_DATASET_ATTACHMENTS).andOperator(
          GridFsCriteria.whereMetaData("dataAcquisitionProjectId").is(dataAcquisitionProjectId)));
  query.with(
      Sort.by(Order.asc("metadata.dataSetNumber"), Order.asc("metadata.indexInDataSet")));
  Iterable<GridFSFile> files = this.operations.find(query);
  List<DataSetAttachmentMetadata> result = new ArrayList<>();
  AtomicInteger countByDataSetNumber = new AtomicInteger(0);
  AtomicInteger currentDataSetNumber = new AtomicInteger(-1);
  files.forEach(gridfsFile -> {
    // assumes every stored attachment carries a "dataSetNumber" metadata entry — TODO confirm
    Integer dataSetNumber = gridfsFile.getMetadata().getInteger("dataSetNumber");
    if (!dataSetNumber.equals(currentDataSetNumber.get())) {
      // First attachment of the next dataSet: remember its number and restart the
      // per-dataSet index at 0. (The previous revision reset and incremented
      // currentDataSetNumber instead of countByDataSetNumber, so the index stayed 0
      // and the group-change detection was clobbered.)
      currentDataSetNumber.set(dataSetNumber);
      countByDataSetNumber.set(0);
    } else {
      countByDataSetNumber.incrementAndGet();
    }
    gridfsFile.getMetadata().put("indexInDataSet", countByDataSetNumber.get());
    result.add(mongoTemplate.getConverter().read(DataSetAttachmentMetadata.class,
        gridfsFile.getMetadata()));
  });
  return result;
}
/**
 * Delete all attachments of all dataSets.
 *
 * @throws ShadowCopyDeleteNotAllowedException if any attachment is a shadow copy.
 */
public void deleteAll() {
  String currentUser = SecurityUtils.getCurrentUserLogin();
  // Match every file stored under /data-sets/<anything>/attachments/.
  Query query = new Query(GridFsCriteria.whereFilename()
      .regex("^" + Pattern.quote("/data-sets/") + ".*" + Pattern.quote("/attachments/")));
  // Commit a delete for each attachment first; abort if a shadow copy is found.
  for (GridFSFile file : this.operations.find(query)) {
    DataSetAttachmentMetadata metadata =
        mongoTemplate.getConverter().read(DataSetAttachmentMetadata.class, file.getMetadata());
    if (metadata.isShadow()) {
      throw new ShadowCopyDeleteNotAllowedException();
    }
    javers.commitShallowDelete(currentUser, metadata);
  }
  this.operations.delete(query);
}
/**
 * Delete one attachment and its metadata from GridFS.
 * Does nothing when no attachment with the given name exists.
 *
 * @param dataSetId The id of the dataSet.
 * @param filename The filename of the attachment.
 * @throws ShadowCopyDeleteNotAllowedException if the attachment is a shadow copy.
 */
public void deleteByDataSetIdAndFilename(String dataSetId, String filename) {
  String completeFilename = DataSetAttachmentFilenameBuilder.buildFileName(dataSetId, filename);
  Query query = new Query(GridFsCriteria.whereFilename().is(completeFilename));
  GridFSFile attachment = this.operations.findOne(query);
  if (attachment == null) {
    // Nothing to delete.
    return;
  }
  DataSetAttachmentMetadata metadata = mongoTemplate.getConverter()
      .read(DataSetAttachmentMetadata.class, attachment.getMetadata());
  if (metadata.isShadow()) {
    throw new ShadowCopyDeleteNotAllowedException();
  }
  String currentUser = SecurityUtils.getCurrentUserLogin();
  this.operations.delete(query);
  javers.commitShallowDelete(currentUser, metadata);
}
/**
 * Attach the given file as data set report to the data set.
 *
 * <p>Any previously attached report with the same (current or legacy) filename is
 * deleted before the new one is stored.</p>
 *
 * @param dataSetId The id of a {@link DataSet}.
 * @param language The language of the report ("de" or "en").
 * @param reportFile The pdf file.
 * @throws IOException Thrown if the multipart file cannot be read.
 * @throws IllegalArgumentException Thrown for unsupported languages.
 */
public void attachDataSetReport(String dataSetId, String language, MultipartFile reportFile)
    throws IOException {
  DataSet dataSet = dataSetRepository.findById(dataSetId)
      .orElseThrow(() -> new IllegalStateException("DataSet not found: " + dataSetId));
  String projectId = dataSet.getDataAcquisitionProjectId();
  DataSetAttachmentMetadata metadata;
  // Second metadata object using the old naming scheme to allow deletion of files
  // that have been attached before the naming scheme change.
  DataSetAttachmentMetadata oldFilenameMetadata;
  switch (language) {
    case "de":
      metadata = buildDataSetReportMetadata(dataSetId, dataSet,
          projectId + "-ds" + dataSet.getNumber() + "_DsReport_de.pdf",
          "Datensatzreport:\n" + dataSet.getDescription().getDe(), "de");
      // Legacy scheme: "dsreport-<project>-ds<n>.pdf" (no language suffix for German).
      oldFilenameMetadata = buildDataSetReportMetadata(dataSetId, dataSet,
          "dsreport-" + projectId + "-ds" + dataSet.getNumber() + ".pdf",
          "Datensatzreport:\n" + dataSet.getDescription().getDe(), "de");
      break;
    case "en":
      metadata = buildDataSetReportMetadata(dataSetId, dataSet,
          projectId + "-ds" + dataSet.getNumber() + "_DsReport_en.pdf",
          "Dataset Report:\n" + dataSet.getDescription().getEn(), "en");
      // Legacy scheme: "dsreport-<project>-ds<n>_en.pdf".
      oldFilenameMetadata = buildDataSetReportMetadata(dataSetId, dataSet,
          "dsreport-" + projectId + "-ds" + dataSet.getNumber() + "_en.pdf",
          "Dataset Report:\n" + dataSet.getDescription().getEn(), "en");
      break;
    default:
      throw new IllegalArgumentException("Unsupported language '" + language + "'!");
  }
  deleteByDataSetIdAndFilename(dataSetId, oldFilenameMetadata.getFileName());
  deleteByDataSetIdAndFilename(dataSetId, metadata.getFileName());
  createDataSetAttachment(reportFile, metadata);
}

/**
 * Build the attachment metadata for a data set report. The bilingual description is
 * identical for all reports; filename, title and language vary per call.
 *
 * @param dataSetId The id of the dataSet the report belongs to.
 * @param dataSet The dataSet itself (source of project id, number and description).
 * @param fileName The attachment filename.
 * @param title The report title.
 * @param language The report language ("de" or "en").
 * @return The assembled metadata.
 */
private DataSetAttachmentMetadata buildDataSetReportMetadata(String dataSetId, DataSet dataSet,
    String fileName, String title, String language) {
  return DataSetAttachmentMetadata.builder().dataSetId(dataSetId)
      .dataAcquisitionProjectId(dataSet.getDataAcquisitionProjectId())
      .dataSetNumber(dataSet.getNumber())
      .fileName(fileName)
      .title(title)
      .description(new I18nString(
          "Codebook/Variablenreport/Datensatzreport von \"" + dataSet.getDescription().getDe()
              + "\"",
          "Codebook/Variable Report/Dataset Report of \"" + dataSet.getDescription().getEn()
              + "\""))
      .language(language).indexInDataSet(0).build();
}
} |
package sp.util.function;
import java.util.Objects;
import java.util.function.DoublePredicate;
import java.util.function.Function;
import java.util.function.Predicate;
/**
 * <p>
 * A predicate (boolean-valued function) that is allowed to throw a
 * {@link Throwable} of type {@code X} while evaluating its single argument.
 * </p>
 *
 * @author Se-foo
 * @param <X>
 *            the type of throwable the predicate may raise.
 * @since 0.1
 */
public interface PredicateWithThrown<X extends Throwable> {

    /**
     * <p>
     * A predicate (boolean-valued function) of one object argument that may
     * throw a throwable of type {@code X}.
     * </p>
     * <p>
     * This is a functional interface whose functional method is
     * {@link #test(Object)}.
     * </p>
     *
     * @author Se-foo
     * @param <T>
     *            the type of the input to the predicate.
     * @param <X>
     *            the type of throwable the predicate may raise.
     * @since 0.1
     */
    @FunctionalInterface
    static interface OfObj<T, X extends Throwable> extends PredicateWithThrown<X> {

        /**
         * Evaluates this predicate on the given argument.
         *
         * @param target
         *            the input argument.
         * @return TRUE if the input argument matches the predicate.
         * @throws X
         *             if evaluating the predicate fails.
         */
        boolean test(T target) throws X;

        /**
         * Returns a predicate that represents the logical negation of this
         * predicate.
         *
         * @return the negated predicate.
         * @see #test(Object)
         */
        @Override
        default PredicateWithThrown.OfObj<T, X> negate() {
            return target -> !this.test(target);
        }

        /**
         * <p>
         * Returns a composed predicate that represents a short-circuiting
         * logical AND of this predicate and another.
         * </p>
         * <p>
         * If this predicate evaluates to FALSE, {@code other} is not
         * evaluated.
         * </p>
         *
         * @param other
         *            a predicate that will be logically-ANDed with this one.
         * @return the composed predicate.
         * @throws NullPointerException
         *             if {@code other} is NULL.
         * @see #test(Object)
         */
        default PredicateWithThrown.OfObj<T, X> and(PredicateWithThrown.OfObj<? super T, ? extends X> other) {
            Objects.requireNonNull(other);
            return target -> this.test(target) && other.test(target);
        }

        /**
         * <p>
         * Returns a composed predicate that represents a short-circuiting
         * logical AND of this predicate and an ordinary {@link Predicate}.
         * </p>
         * <p>
         * If this predicate evaluates to FALSE, {@code other} is not
         * evaluated.
         * </p>
         *
         * @param other
         *            a predicate that will be logically-ANDed with this one.
         * @return the composed predicate.
         * @throws NullPointerException
         *             if {@code other} is NULL.
         * @see #test(Object)
         */
        default PredicateWithThrown.OfObj<T, X> andPredicate(Predicate<? super T> other) {
            Objects.requireNonNull(other);
            return target -> this.test(target) && other.test(target);
        }

        /**
         * <p>
         * Returns a composed predicate that represents a short-circuiting
         * logical OR of this predicate and another.
         * </p>
         * <p>
         * If this predicate evaluates to TRUE, {@code other} is not
         * evaluated.
         * </p>
         *
         * @param other
         *            a predicate that will be logically-ORed with this one.
         * @return the composed predicate.
         * @throws NullPointerException
         *             if {@code other} is NULL.
         * @see #test(Object)
         */
        default PredicateWithThrown.OfObj<T, X> or(PredicateWithThrown.OfObj<? super T, ? extends X> other) {
            Objects.requireNonNull(other);
            return target -> this.test(target) || other.test(target);
        }

        /**
         * <p>
         * Returns a composed predicate that represents a short-circuiting
         * logical OR of this predicate and an ordinary {@link Predicate}.
         * </p>
         * <p>
         * If this predicate evaluates to TRUE, {@code other} is not
         * evaluated.
         * </p>
         *
         * @param other
         *            a predicate that will be logically-ORed with this one.
         * @return the composed predicate.
         * @throws NullPointerException
         *             if {@code other} is NULL.
         * @see #test(Object)
         */
        default PredicateWithThrown.OfObj<T, X> orPredicate(Predicate<? super T> other) {
            Objects.requireNonNull(other);
            return target -> this.test(target) || other.test(target);
        }

        /**
         * <p>
         * Converts this predicate to a plain {@link java.util.function.Predicate}.
         * </p>
         * <p>
         * {@link RuntimeException}s and {@link Error}s thrown while testing are
         * rethrown unchanged; every other {@link Throwable} is mapped through
         * {@code throwable} and the resulting runtime exception is thrown.
         * </p>
         *
         * @param throwable
         *            maps a caught throwable to the runtime exception to throw.
         * @return an unchecked predicate delegating to this one.
         * @throws NullPointerException
         *             if {@code throwable} is NULL, or if it returns NULL.
         * @see #test(Object)
         */
        default Predicate<T> toPredicate(Function<? super Throwable, ? extends RuntimeException> throwable) {
            Objects.requireNonNull(throwable);
            return target -> {
                try {
                    return this.test(target);
                } catch (RuntimeException e) {
                    throw e;
                } catch (Exception e) {
                    throw Objects.requireNonNull(throwable.apply(e));
                } catch (Error e) {
                    throw e;
                } catch (Throwable e) {
                    throw Objects.requireNonNull(throwable.apply(e));
                }
            };
        }

        /**
         * <p>
         * Converts this predicate to a plain {@link java.util.function.Predicate}.
         * </p>
         * <p>
         * Checked {@link Throwable}s thrown while testing are wrapped in a
         * {@link RuntimeException}.
         * </p>
         *
         * @return an unchecked predicate delegating to this one.
         * @see #toPredicate(Function)
         */
        default Predicate<T> toPredicate() {
            return this.toPredicate(cause -> new RuntimeException(cause));
        }
    }

    /**
     * <p>
     * A predicate (boolean-valued function) of one {@code double} argument that
     * may throw a throwable of type {@code X}.
     * </p>
     * <p>
     * This is a functional interface whose functional method is
     * {@link #test(double)}.
     * </p>
     *
     * @author Se-foo
     * @param <X>
     *            the type of throwable the predicate may raise.
     * @since 0.1
     */
    @FunctionalInterface
    static interface OfDouble<X extends Throwable> extends PredicateWithThrown<X> {

        /**
         * Evaluates this predicate on the given argument.
         *
         * @param target
         *            the input argument.
         * @return TRUE if the input argument matches the predicate.
         * @throws X
         *             if evaluating the predicate fails.
         */
        boolean test(double target) throws X;

        /**
         * Returns a predicate that represents the logical negation of this
         * predicate.
         *
         * @return the negated predicate.
         * @see #test(double)
         */
        @Override
        default PredicateWithThrown.OfDouble<X> negate() {
            return target -> !this.test(target);
        }

        /**
         * <p>
         * Returns a composed predicate that represents a short-circuiting
         * logical AND of this predicate and another.
         * </p>
         * <p>
         * If this predicate evaluates to FALSE, {@code other} is not
         * evaluated.
         * </p>
         *
         * @param other
         *            a predicate that will be logically-ANDed with this one.
         * @return the composed predicate.
         * @throws NullPointerException
         *             if {@code other} is NULL.
         * @see #test(double)
         */
        default PredicateWithThrown.OfDouble<X> and(PredicateWithThrown.OfDouble<? extends X> other) {
            Objects.requireNonNull(other);
            return target -> this.test(target) && other.test(target);
        }

        /**
         * <p>
         * Returns a composed predicate that represents a short-circuiting
         * logical AND of this predicate and an ordinary {@link DoublePredicate}.
         * </p>
         * <p>
         * If this predicate evaluates to FALSE, {@code other} is not
         * evaluated.
         * </p>
         *
         * @param other
         *            a predicate that will be logically-ANDed with this one.
         * @return the composed predicate.
         * @throws NullPointerException
         *             if {@code other} is NULL.
         * @see #test(double)
         */
        default PredicateWithThrown.OfDouble<X> andPredicate(DoublePredicate other) {
            Objects.requireNonNull(other);
            return target -> this.test(target) && other.test(target);
        }

        /**
         * <p>
         * Returns a composed predicate that represents a short-circuiting
         * logical OR of this predicate and another.
         * </p>
         * <p>
         * If this predicate evaluates to TRUE, {@code other} is not
         * evaluated.
         * </p>
         *
         * @param other
         *            a predicate that will be logically-ORed with this one.
         * @return the composed predicate.
         * @throws NullPointerException
         *             if {@code other} is NULL.
         * @see #test(double)
         */
        default PredicateWithThrown.OfDouble<X> or(PredicateWithThrown.OfDouble<? extends X> other) {
            Objects.requireNonNull(other);
            return target -> this.test(target) || other.test(target);
        }

        /**
         * <p>
         * Returns a composed predicate that represents a short-circuiting
         * logical OR of this predicate and an ordinary {@link DoublePredicate}.
         * </p>
         * <p>
         * If this predicate evaluates to TRUE, {@code other} is not
         * evaluated.
         * </p>
         *
         * @param other
         *            a predicate that will be logically-ORed with this one.
         * @return the composed predicate.
         * @throws NullPointerException
         *             if {@code other} is NULL.
         * @see #test(double)
         */
        default PredicateWithThrown.OfDouble<X> orPredicate(DoublePredicate other) {
            Objects.requireNonNull(other);
            return target -> this.test(target) || other.test(target);
        }

        /**
         * <p>
         * Converts this predicate to a plain {@link java.util.function.DoublePredicate}.
         * </p>
         * <p>
         * {@link RuntimeException}s and {@link Error}s thrown while testing are
         * rethrown unchanged; every other {@link Throwable} is mapped through
         * {@code throwable} and the resulting runtime exception is thrown.
         * </p>
         *
         * @param throwable
         *            maps a caught throwable to the runtime exception to throw.
         * @return an unchecked predicate delegating to this one.
         * @throws NullPointerException
         *             if {@code throwable} is NULL, or if it returns NULL.
         * @see #test(double)
         */
        default DoublePredicate toPredicate(Function<? super Throwable, ? extends RuntimeException> throwable) {
            Objects.requireNonNull(throwable);
            return target -> {
                try {
                    return this.test(target);
                } catch (RuntimeException e) {
                    throw e;
                } catch (Exception e) {
                    throw Objects.requireNonNull(throwable.apply(e));
                } catch (Error e) {
                    throw e;
                } catch (Throwable e) {
                    throw Objects.requireNonNull(throwable.apply(e));
                }
            };
        }

        /**
         * <p>
         * Converts this predicate to a plain {@link java.util.function.DoublePredicate}.
         * </p>
         * <p>
         * Checked {@link Throwable}s thrown while testing are wrapped in a
         * {@link RuntimeException}.
         * </p>
         *
         * @return an unchecked predicate delegating to this one.
         * @see #toPredicate(Function)
         */
        default DoublePredicate toPredicate() {
            return this.toPredicate(cause -> new RuntimeException(cause));
        }
    }

    /**
     * Returns a predicate that represents the logical negation of this
     * predicate.
     *
     * @return the negated predicate.
     */
    PredicateWithThrown<X> negate();
}
/*
 * Integration tests for the Spring-based ESPN headlines API service.
 */
package com.infinitemule.espn.api.test.headlines.spring;
import static com.infinitemule.espn.common.lang.Console.printfn;
import static com.infinitemule.espn.common.lang.Console.println;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
import com.infinitemule.espn.api.headlines.Headline;
import com.infinitemule.espn.api.headlines.HeadlinesApiRequest;
import com.infinitemule.espn.api.headlines.HeadlinesApiResponse;
import com.infinitemule.espn.api.headlines.spring.HeadlinesApiServiceSpring;
import com.infinitemule.espn.api.test.AbstractApiServiceSpringIntgTest;
public class HeadlinesApiServiceSpringIntgTest extends AbstractApiServiceSpringIntgTest {
@Autowired
private HeadlinesApiServiceSpring srv;
@Test
public void newsAllCities() {
HeadlinesApiRequest req = new HeadlinesApiRequest()
.news().forCities();
output(srv.call(req));
}
@Test
public void headlinesAllCities() {
HeadlinesApiRequest req = new HeadlinesApiRequest()
.headlines().forCities();
output(srv.call(req));
}
private void output(HeadlinesApiResponse response) {
for(Headline headline : response.getHeadlines()) {
println("
printfn("%s (%s)", headline.getHeadline(), headline.getId());
println(" - Description:");
printfn(" %s - %s", headline.getPublished(), headline.getType());
printfn(" %s", headline.getTitle());
printfn(" %s", headline.getDescription());
}
}
} |
package org.apache.derby.impl.sql.execute.operations.joins;
import com.google.common.collect.Maps;
import com.splicemachine.derby.test.framework.*;
import com.splicemachine.homeless.TestUtils;
import org.apache.log4j.Logger;
import org.junit.*;
import org.junit.rules.RuleChain;
import org.junit.rules.TestRule;
import org.junit.runner.Description;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.Statement;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import static com.splicemachine.homeless.TestUtils.o;
public class OuterJoinTest extends SpliceUnitTest {
private static Logger LOG = Logger.getLogger(OuterJoinTest.class);
private static final Map<String, String> tableMap = Maps.newHashMap();
public static final String CLASS_NAME = OuterJoinTest.class.getSimpleName().toUpperCase() + "_2";
public static final String TABLE_NAME_1 = "A";
public static final String TABLE_NAME_2 = "CC";
public static final String TABLE_NAME_3 = "DD";
public static final String TABLE_NAME_4 = "D";
public static final String TABLE_NAME_5 = "E";
public static final String TABLE_NAME_6 = "F";
public static final String TABLE_NAME_7 = "G";
protected static DefaultedSpliceWatcher spliceClassWatcher = new DefaultedSpliceWatcher(CLASS_NAME);
protected static SpliceSchemaWatcher spliceSchemaWatcher = new SpliceSchemaWatcher(CLASS_NAME);
protected static SpliceTableWatcher spliceTableWatcher1 = new SpliceTableWatcher(TABLE_NAME_1, CLASS_NAME, "(si varchar(40),sa character varying(40),sc varchar(40),sd int,se float)");
protected static SpliceTableWatcher spliceTableWatcher2 = new SpliceTableWatcher(TABLE_NAME_2, CLASS_NAME, "(si varchar(40), sa varchar(40))");
protected static SpliceTableWatcher spliceTableWatcher3 = new SpliceTableWatcher(TABLE_NAME_3, CLASS_NAME, "(si varchar(40), sa varchar(40))");
protected static SpliceTableWatcher spliceTableWatcher4 = new SpliceTableWatcher(TABLE_NAME_4, CLASS_NAME, "(a varchar(20), b varchar(20), c varchar(10), d decimal, e varchar(15))");
protected static SpliceTableWatcher spliceTableWatcher5 = new SpliceTableWatcher(TABLE_NAME_5, CLASS_NAME, "(a varchar(20), b varchar(20), w decimal(4),e varchar(15))");
protected static SpliceTableWatcher spliceTableWatcher6 = new SpliceTableWatcher(TABLE_NAME_6, CLASS_NAME, "(a varchar(20), b varchar(20), c varchar(10), d decimal, e varchar(15))");
protected static SpliceTableWatcher spliceTableWatcher7 = new SpliceTableWatcher(TABLE_NAME_7, CLASS_NAME, "(a varchar(20), b varchar(20), w decimal(4),e varchar(15))");
@ClassRule
public static TestRule chain = RuleChain.outerRule(spliceClassWatcher)
.around(spliceSchemaWatcher)
.around(spliceTableWatcher1)
.around(spliceTableWatcher2)
.around(spliceTableWatcher3)
.around(spliceTableWatcher4)
.around(spliceTableWatcher5)
.around(spliceTableWatcher6)
.around(spliceTableWatcher7)
.around(new SpliceDataWatcher() {
@Override
protected void starting(Description description) {
try {
PreparedStatement ps = spliceClassWatcher.prepareStatement(format("insert into %s (si, sa, sc,sd,se) values (?,?,?,?,?)", TABLE_NAME_1));
for (int i = 0; i < 10; i++) {
ps.setString(1, "" + i);
ps.setString(2, "i");
ps.setString(3, "" + i * 10);
ps.setInt(4, i);
ps.setFloat(5, 10.0f * i);
ps.executeUpdate();
}
} catch (Exception e) {
throw new RuntimeException(e);
} finally {
spliceClassWatcher.closeAll();
}
}
}).around(new SpliceDataWatcher() {
@Override
protected void starting(Description description) {
try {
Statement statement = spliceClassWatcher.getStatement();
statement.execute(String.format("insert into %s values ('p1','mxss','design',10000,'deale')", TABLE_NAME_4));
statement.execute(String.format("insert into %s values ('e2','alice',12,'deale')", TABLE_NAME_5));
statement.execute(String.format("insert into %s values ('e3','alice',12,'deale')", TABLE_NAME_5));
} catch (Exception e) {
throw new RuntimeException(e);
} finally {
spliceClassWatcher.closeAll();
}
}
}).around(new SpliceDataWatcher() {
@Override
protected void starting(Description description) {
try {
insertData(TABLE_NAME_2, TABLE_NAME_3, spliceClassWatcher);
} catch (Exception e) {
throw new RuntimeException(e);
} finally {
spliceClassWatcher.closeAll();
}
}
}).around(TestUtils.createFileDataWatcher(spliceClassWatcher, "small_msdatasample/startup.sql", CLASS_NAME))
.around(TestUtils.createFileDataWatcher(spliceClassWatcher, "test_data/employee.sql", CLASS_NAME))
.around(TestUtils.createFileDataWatcher(spliceClassWatcher, "test_data/basic_join_dataset.sql", CLASS_NAME));
@Rule
public SpliceWatcher methodWatcher = new DefaultedSpliceWatcher(CLASS_NAME);
public static void insertData(String t1, String t2, SpliceWatcher spliceWatcher) throws Exception {
PreparedStatement psC = spliceWatcher.prepareStatement("insert into " + t1 + " values (?,?)");
PreparedStatement psD = spliceWatcher.prepareStatement("insert into " + t2 + " values (?,?)");
for (int i = 0; i < 10; i++) {
psC.setString(1, "" + i);
psC.setString(2, "i");
psC.executeUpdate();
if (i != 9) {
psD.setString(1, "" + i);
psD.setString(2, "i");
psD.executeUpdate();
}
}
spliceWatcher.commit();
}
// TESTS
@Test
public void testNestedLoopLeftOuterJoin() throws Exception {
ResultSet rs = methodWatcher.executeQuery("select t1.EMPNAME, t1.CITY, t2.PTYPE from STAFF t1 left outer join PROJ t2 --DERBY-PROPERTIES joinStrategy=NESTEDLOOP \n" +
" on t1.CITY = t2.CITY");
List<Map> results = TestUtils.resultSetToMaps(rs);
Assert.assertEquals(11, results.size());
}
@Test
public void testScrollableVarcharLeftOuterJoinWithJoinStrategy() throws Exception {
ResultSet rs = methodWatcher.executeQuery("select cc.si, dd.si from cc left outer join dd --DERBY-PROPERTIES joinStrategy=SORTMERGE \n on cc.si = dd.si");
int j = 0;
while (rs.next()) {
j++;
Assert.assertNotNull(rs.getString(1));
if (!rs.getString(1).equals("9")) {
Assert.assertNotNull(rs.getString(2));
Assert.assertEquals(rs.getString(1), rs.getString(2));
} else {
Assert.assertNull(rs.getString(2));
}
}
Assert.assertEquals(10, j);
}
@Test
public void testSinkableVarcharLeftOuterJoinWithJoinStrategy() throws Exception {
ResultSet rs = methodWatcher.executeQuery("select cc.si, count(*) from cc left outer join dd --DERBY-PROPERTIES joinStrategy=SORTMERGE \n on cc.si = dd.si group by cc.si");
int j = 0;
while (rs.next()) {
j++;
LOG.info(String.format("cc.sa=%s,count=%dd", rs.getString(1), rs.getInt(2)));
// Assert.assertNotNull(rs.getString(1));
// if (!rs.getString(1).equals("9")) {
// Assert.assertEquals(1l,rs.getLong(2));
}
Assert.assertEquals(10, j);
}
@Test
@Ignore("Bug 325")
public void testScrollableVarcharRightOuterJoinWithJoinStrategy() throws Exception {
ResultSet rs = methodWatcher.executeQuery("select cc.si, dd.si from cc right outer join dd --DERBY-PROPERTIES joinStrategy=SORTMERGE \n on cc.si = dd.si");
int j = 0;
while (rs.next()) {
j++;
LOG.info("cc.si=" + rs.getString(1) + ",dd.si=" + rs.getString(2));
Assert.assertNotNull(rs.getString(2));
if (!rs.getString(2).equals("9")) {
Assert.assertNotNull(rs.getString(1));
Assert.assertEquals(rs.getString(1), rs.getString(2));
} else {
Assert.assertNull(rs.getString(1));
}
}
Assert.assertEquals(9, j);
}
@Test
@Ignore("Bug 325")
public void testSinkableVarcharRightOuterJoinWithJoinStrategy() throws Exception {
ResultSet rs = methodWatcher.executeQuery("select cc.si, count(*) from cc right outer join dd --DERBY-PROPERTIES joinStrategy=SORTMERGE \n on cc.si = dd.si group by cc.si");
int j = 0;
while (rs.next()) {
j++;
Assert.assertNotNull(rs.getString(1));
if (!rs.getString(1).equals("9")) {
Assert.assertEquals(1l, rs.getLong(2));
} else {
Assert.assertNotNull(null);
}
}
Assert.assertEquals(9, j);
}
@Test
public void testScrollableVarcharLeftOuterJoin() throws Exception {
ResultSet rs = methodWatcher.executeQuery("select cc.si, dd.si from cc left outer join dd on cc.si = dd.si");
int j = 0;
while (rs.next()) {
j++;
String left = rs.getString(1);
String right = rs.getString(2);
System.out.printf("left=%s, right=%s%n", left, right);
Assert.assertNotNull("left side is null", left);
if (!rs.getString(1).equals("9")) {
Assert.assertNotNull("right side is null", right);
Assert.assertEquals(left, right);
} else {
Assert.assertNull("right side is not null", rs.getString(2));
}
}
Assert.assertEquals(10, j);
}
@Test
public void testSinkableVarcharLeftOuterJoin() throws Exception {
ResultSet rs = methodWatcher.executeQuery("select cc.si, count(*) from cc left outer join dd on cc.si = dd.si group by cc.si");
int j = 0;
while (rs.next()) {
j++;
Assert.assertNotNull(rs.getString(1));
if (!rs.getString(1).equals("9")) {
Assert.assertEquals(1l, rs.getLong(2));
}
}
Assert.assertEquals(10, j);
}
@Test
public void testScrollableVarcharRightOuterJoin() throws Exception {
ResultSet rs = methodWatcher.executeQuery("select cc.si, dd.si from cc right outer join dd on cc.si = dd.si");
int j = 0;
while (rs.next()) {
j++;
LOG.info("c.si=" + rs.getString(1) + ",d.si=" + rs.getString(2));
Assert.assertNotNull(rs.getString(2));
if (!rs.getString(2).equals("9")) {
Assert.assertNotNull(rs.getString(1));
Assert.assertEquals(rs.getString(1), rs.getString(2));
} else {
Assert.assertNull(rs.getString(1));
}
}
Assert.assertEquals(9, j);
}
@Test
public void testSinkableVarcharRightOuterJoin() throws Exception {
ResultSet rs = methodWatcher.executeQuery("select cc.si, count(*) from cc right outer join dd on cc.si = dd.si group by cc.si");
int j = 0;
while (rs.next()) {
j++;
Assert.assertNotNull(rs.getString(1));
if (!rs.getString(1).equals("9")) {
Assert.assertEquals(1l, rs.getLong(2));
} else {
Assert.assertNotNull(null);
}
}
Assert.assertEquals(9, j);
}
@Test
public void testLeftOuterJoinWithIsNull() throws Exception {
List<Object[]> expected = Collections.singletonList( o("E5") );
ResultSet rs = methodWatcher.executeQuery("select a.empnum from staff a left outer join works b on a.empnum = b.empnum where b.empnum is null");
List results = TestUtils.resultSetToArrays(rs);
Assert.assertArrayEquals(expected.toArray(), results.toArray());
}
} |
package com.adyen;
import com.adyen.model.Amount;
import com.adyen.model.notification.NotificationRequest;
import com.adyen.model.notification.NotificationRequestItem;
import com.adyen.model.notification.NotificationRequestItemContainer;
import com.adyen.notification.NotificationHandler;
import com.fasterxml.jackson.core.JsonProcessingException;
import org.junit.Before;
import org.junit.Test;
import java.util.Collections;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
/**
* Tests notification messages
*/
public class NotificationTest extends BaseTest {
private NotificationHandler notificationHandler;
@Before
public void init() {
notificationHandler = new NotificationHandler();
}
@Test
public void testAuthorisationSuccess() throws Exception {
NotificationRequest notificationRequest = readNotificationRequestFromFile("mocks/notification/authorisation-true.json");
assertEquals(1, notificationRequest.getNotificationItems().size());
NotificationRequestItem notificationRequestItem = notificationRequest.getNotificationItems().get(0);
assertEquals(NotificationRequestItem.EVENT_CODE_AUTHORISATION, notificationRequestItem.getEventCode());
assertTrue(notificationRequestItem.isSuccess());
assertEquals("123456789", notificationRequestItem.getPspReference());
}
@Test
public void testCaptureSuccess() throws Exception {
NotificationRequest notificationRequest = readNotificationRequestFromFile("mocks/notification/capture-true.json");
assertEquals(1, notificationRequest.getNotificationItems().size());
NotificationRequestItem notificationRequestItem = notificationRequest.getNotificationItems().get(0);
assertEquals(NotificationRequestItem.EVENT_CODE_CAPTURE, notificationRequestItem.getEventCode());
assertTrue(notificationRequestItem.isSuccess());
assertEquals("PSP_REFERENCE", notificationRequestItem.getPspReference());
assertEquals("ORIGINAL_PSP", notificationRequestItem.getOriginalReference());
}
@Test
public void testCaptureFail() throws Exception {
NotificationRequest notificationRequest = readNotificationRequestFromFile("mocks/notification/capture-false.json");
assertEquals(1, notificationRequest.getNotificationItems().size());
NotificationRequestItem notificationRequestItem = notificationRequest.getNotificationItems().get(0);
assertEquals(NotificationRequestItem.EVENT_CODE_CAPTURE, notificationRequestItem.getEventCode());
assertFalse(notificationRequestItem.isSuccess());
assertEquals("PSP_REFERENCE", notificationRequestItem.getPspReference());
assertEquals("ORIGINAL_PSP", notificationRequestItem.getOriginalReference());
}
@Test
public void testRefundSuccess() throws Exception {
NotificationRequest notificationRequest = readNotificationRequestFromFile("mocks/notification/refund-true.json");
assertEquals(1, notificationRequest.getNotificationItems().size());
NotificationRequestItem notificationRequestItem = notificationRequest.getNotificationItems().get(0);
assertEquals(NotificationRequestItem.EVENT_CODE_REFUND, notificationRequestItem.getEventCode());
assertTrue(notificationRequestItem.isSuccess());
assertEquals("PSP_REFERENCE", notificationRequestItem.getPspReference());
assertEquals("ORIGINAL_PSP", notificationRequestItem.getOriginalReference());
assertNotNull(notificationRequestItem.getEventDate());
}
@Test
public void testRefundFail() throws Exception {
NotificationRequest notificationRequest = readNotificationRequestFromFile("mocks/notification/refund-false.json");
assertEquals(1, notificationRequest.getNotificationItems().size());
NotificationRequestItem notificationRequestItem = notificationRequest.getNotificationItems().get(0);
assertEquals(NotificationRequestItem.EVENT_CODE_REFUND, notificationRequestItem.getEventCode());
assertFalse(notificationRequestItem.isSuccess());
assertEquals("PSP_REFERENCE", notificationRequestItem.getPspReference());
assertEquals("ORIGINAL_PSP", notificationRequestItem.getOriginalReference());
assertNotNull(notificationRequestItem.getEventDate());
}
@Test
public void testChargeback() throws Exception {
NotificationRequest notificationRequest = readNotificationRequestFromFile("mocks/notification/chargeback.json");
assertEquals(1, notificationRequest.getNotificationItems().size());
NotificationRequestItem notificationRequestItem = notificationRequest.getNotificationItems().get(0);
assertEquals(NotificationRequestItem.EVENT_CODE_CHARGEBACK, notificationRequestItem.getEventCode());
assertTrue(notificationRequestItem.isSuccess());
assertEquals("9915555555555555", notificationRequestItem.getPspReference());
assertEquals("9913333333333333", notificationRequestItem.getOriginalReference());
assertNotNull(notificationRequestItem.getAmount());
assertEquals("EUR", notificationRequestItem.getAmount().getCurrency());
assertEquals(new Long(1000), notificationRequestItem.getAmount().getValue());
assertNotNull(notificationRequestItem.getEventDate());
}
@Test
public void testAuthorisationAdjustmentTrue() throws Exception {
NotificationRequest notificationRequest = readNotificationRequestFromFile("mocks/notification/chargeback.json");
assertEquals(1, notificationRequest.getNotificationItems().size());
NotificationRequestItem notificationRequestItem = notificationRequest.getNotificationItems().get(0);
assertEquals(NotificationRequestItem.EVENT_CODE_CHARGEBACK, notificationRequestItem.getEventCode());
assertTrue(notificationRequestItem.isSuccess());
assertEquals("9915555555555555", notificationRequestItem.getPspReference());
assertEquals("9913333333333333", notificationRequestItem.getOriginalReference());
assertNotNull(notificationRequestItem.getAmount());
assertEquals("EUR", notificationRequestItem.getAmount().getCurrency());
assertEquals(new Long(1000), notificationRequestItem.getAmount().getValue());
assertNotNull(notificationRequestItem.getEventDate());
}
@Test
public void testCancellationTrue() throws Exception {
NotificationRequest notificationRequest = readNotificationRequestFromFile("mocks/notification/cancellation-true.json");
assertEquals(1, notificationRequest.getNotificationItems().size());
NotificationRequestItem notificationRequestItem = notificationRequest.getNotificationItems().get(0);
assertEquals(NotificationRequestItem.EVENT_CODE_CANCELLATION, notificationRequestItem.getEventCode());
assertTrue(notificationRequestItem.isSuccess());
assertEquals("8412534564722331", notificationRequestItem.getPspReference());
assertEquals("8313547924770610", notificationRequestItem.getOriginalReference());
assertNotNull(notificationRequestItem.getAmount());
assertEquals("EUR", notificationRequestItem.getAmount().getCurrency());
assertEquals(new Long(500), notificationRequestItem.getAmount().getValue());
assertNotNull(notificationRequestItem.getEventDate());
}
@Test
public void testOfferClosed() throws Exception {
NotificationRequest notificationRequest = readNotificationRequestFromFile("mocks/notification/offer-closed.json");
assertEquals(1, notificationRequest.getNotificationItems().size());
NotificationRequestItem notificationRequestItem = notificationRequest.getNotificationItems().get(0);
assertEquals(NotificationRequestItem.EVENT_CODE_OFFER_CLOSED, notificationRequestItem.getEventCode());
assertTrue(notificationRequestItem.isSuccess());
assertEquals("8532565401975321", notificationRequestItem.getPspReference());
assertEquals("ideal", notificationRequestItem.getPaymentMethod());
assertNotNull(notificationRequestItem.getAmount());
assertEquals("EUR", notificationRequestItem.getAmount().getCurrency());
assertEquals(new Long(27211), notificationRequestItem.getAmount().getValue());
assertNotNull(notificationRequestItem.getEventDate());
}
@Test
public void testGsonAndJacksonSerializeNotificationRequest() throws JsonProcessingException {
NotificationRequest notificationRequest = new NotificationRequest();
notificationRequest.setLive("live");
NotificationRequestItemContainer notificationContainer = new NotificationRequestItemContainer();
NotificationRequestItem notificationRequestItem = new NotificationRequestItem();
notificationRequestItem.setAmount(new Amount());
notificationRequestItem.setEventCode("eventcode");
notificationContainer.setNotificationItem(notificationRequestItem);
notificationRequest.setNotificationItemContainers(Collections.singletonList(notificationContainer));
String gson = GSON.toJson(notificationRequest);
String jackson = OBJECT_MAPPER.writeValueAsString(notificationRequest);
assertEquals(jackson, gson);
}
/**
 * Loads the JSON fixture at the given classpath location and parses it
 * into a {@link NotificationRequest} via the notification handler.
 */
private NotificationRequest readNotificationRequestFromFile(String resourcePath) {
    return notificationHandler.handleNotificationJson(getFileContents(resourcePath));
}
} |
package innovimax.mixthem;
import innovimax.mixthem.arguments.Rule;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.EnumMap;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* @author Innovimax
* @version 1.0
*/
public class RuleRuns {
final private Map<Rule, List<RuleRun>> runMap;
public RuleRuns() {
runMap = new HashMap<Rule, List<RuleRun>>();
for (Rule rule : Rule.values()) {
List<RuleRun> runs = new ArrayList<RuleRun>();
if (rule.hasParams()) {
switch (rule) {
case _RANDOM_ALT_LINE:
runs.add(new RuleRun(1, Collections.emptyList()));
runs.add(new RuleRun(1, Collections.singletonList("1789")));
break;
case _JOIN:
runs.add(new RuleRun(3, Collections.emptyList()));
//runs.add(new RuleRun(4, Arrays.asList("2", "1")));
//runs.add(new RuleRun(5, Collections.singletonList("1")));
break;
}
} else {
runs.add(new RuleRun(Collections.emptyList()));
}
runMap.put(rule, runs);
}
}
public List<RuleRun> getRuns(Rule rule) {
return runMap.get(rule);
}
} |
package org.takes.http;
import com.google.common.base.Joiner;
import com.jcabi.http.request.JdkRequest;
import com.jcabi.http.response.RestResponse;
import com.jcabi.matchers.RegexMatchers;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.InetAddress;
import java.net.ServerSocket;
import java.net.Socket;
import java.net.URI;
import org.hamcrest.MatcherAssert;
import org.hamcrest.Matchers;
import org.junit.Ignore;
import org.junit.Test;
import org.mockito.Mockito;
import org.takes.Request;
import org.takes.Response;
import org.takes.Take;
import org.takes.facets.fork.FkRegex;
import org.takes.facets.fork.TkFork;
import org.takes.rq.RqHeaders;
import org.takes.rq.RqSocket;
import org.takes.rs.RsEmpty;
import org.takes.tk.TkText;
/**
* Test case for {@link BkBasic}.
*
* @author Dmitry Zaytsev (dmitry.zaytsev@gmail.com)
* @version $Id$
* @since 0.15.2
* @checkstyle ClassDataAbstractionCouplingCheck (500 lines)
* @checkstyle MultipleStringLiteralsCheck (500 lines)
* @todo #306:30min At the moment we don't support HTTP
* persistent connections. Would be great to implement
* this feature. BkBasic.accept should handle more
* than one HTTP request in one connection.
* @todo #516:30min It will be nice to refactor tests with Socket usage and
* replace them to real statements. See usage of BkBasicTest.createMockSocket.
* @todo #516:15min Move header names from BkBasic to public constants.
* Reusable header names will help in many situations. For example - in new
* integration tests.
*/
@SuppressWarnings(
    {
        "PMD.ExcessiveImports",
        "PMD.DoNotUseThreads",
        "PMD.TooManyMethods"
    })
public final class BkBasicTest {
    /**
     * Carriage-return + line-feed sequence that delimits HTTP header lines.
     */
    private static final String CRLF = "\r\n";
    /**
     * POST request line constant.
     */
    private static final String POST = "POST / HTTP/1.1";
    /**
     * Host header constant.
     */
    private static final String HOST = "Host:localhost";
    /**
     * BkBasic can handle socket data.
     *
     * @throws IOException If some problem inside
     */
    @Test
    public void handlesSocket() throws IOException {
        // Mock socket pre-loaded with a canned GET request.
        final Socket socket = createMockSocket();
        final ByteArrayOutputStream baos = new ByteArrayOutputStream();
        // Re-stub the output stream so this test captures the response locally.
        Mockito.when(socket.getOutputStream()).thenReturn(baos);
        new BkBasic(new TkText("Hello world!")).accept(socket);
        MatcherAssert.assertThat(
            baos.toString(),
            Matchers.containsString("Hello world")
        );
    }
    /**
     * BkBasic can return HTTP status 404 when accessing invalid URL.
     *
     * @throws IOException if any I/O error occurs.
     */
    @Test
    public void returnsProperResponseCodeOnInvalidUrl() throws IOException {
        // Only /path/a and /path/b are routed; /path/c must 404.
        new FtRemote(
            new TkFork(
                new FkRegex("/path/a", new TkText("a")),
                new FkRegex("/path/b", new TkText("b"))
            )
        ).exec(
            new FtRemote.Script() {
                @Override
                public void exec(final URI home) throws IOException {
                    new JdkRequest(String.format("%s/path/c", home))
                        .fetch()
                        .as(RestResponse.class)
                        .assertStatus(HttpURLConnection.HTTP_NOT_FOUND);
                }
            }
        );
    }
    /**
     * BkBasic produces headers with addresses without slashes.
     *
     * @throws IOException If some problem inside
     */
    @Test
    public void addressesInHeadersAddedWithoutSlashes() throws IOException {
        final Socket socket = BkBasicTest.createMockSocket();
        // Single-element array smuggles the request out of the anonymous Take.
        final Request[] holder = new Request[1];
        new BkBasic(
            new Take() {
                @Override
                public Response act(final Request req) {
                    holder[0] = req;
                    return new RsEmpty();
                }
            }
        ).accept(socket);
        final Request request = holder[0];
        final RqHeaders.Smart smart = new RqHeaders.Smart(
            new RqHeaders.Base(request)
        );
        MatcherAssert.assertThat(
            smart.single(
                "X-Takes-LocalAddress",
                ""
            ),
            Matchers.not(
                Matchers.containsString("/")
            )
        );
        MatcherAssert.assertThat(
            smart.single(
                "X-Takes-RemoteAddress",
                ""
            ),
            Matchers.not(
                Matchers.containsString("/")
            )
        );
        MatcherAssert.assertThat(
            new RqSocket(request).getLocalAddress(),
            Matchers.notNullValue()
        );
        MatcherAssert.assertThat(
            new RqSocket(request).getRemoteAddress(),
            Matchers.notNullValue()
        );
    }
    /**
     * BkBasic can handle two requests in one connection.
     *
     * Ignored: persistent HTTP connections are not supported yet
     * (see the class-level todo #306).
     *
     * @throws Exception If some problem inside
     */
    @Ignore
    @Test
    public void handlesTwoRequestInOneConnection() throws Exception {
        final String text = "Hello Twice!";
        final ByteArrayOutputStream output = new ByteArrayOutputStream();
        // Port 0 lets the OS pick a free ephemeral port.
        final ServerSocket server = new ServerSocket(0);
        try {
            // Serve a single accepted connection on a background thread.
            new Thread(
                new Runnable() {
                    @Override
                    public void run() {
                        try {
                            new BkBasic(new TkText(text)).accept(
                                server.accept()
                            );
                        } catch (final IOException exception) {
                            throw new IllegalStateException(exception);
                        }
                    }
                }
            ).start();
            final Socket socket = new Socket(
                server.getInetAddress(),
                server.getLocalPort()
            );
            try {
                // Two back-to-back POST requests on the same connection.
                socket.getOutputStream().write(
                    Joiner.on(BkBasicTest.CRLF).join(
                        BkBasicTest.POST,
                        BkBasicTest.HOST,
                        "Content-Length: 11",
                        "",
                        "Hello First",
                        BkBasicTest.POST,
                        BkBasicTest.HOST,
                        "Content-Length: 12",
                        "",
                        "Hello Second"
                    ).getBytes()
                );
                final InputStream input = socket.getInputStream();
                // @checkstyle MagicNumber (1 line)
                final byte[] buffer = new byte[4096];
                for (int count = input.read(buffer); count != -1;
                    count = input.read(buffer)) {
                    output.write(buffer, 0, count);
                }
            } finally {
                socket.close();
            }
        } finally {
            server.close();
        }
        // Both responses must appear, in order, in the combined output.
        MatcherAssert.assertThat(
            output.toString(),
            RegexMatchers.containsPattern(text + ".*?" + text)
        );
    }
    /**
     * BkBasic can return HTTP status 411 when a persistent connection request
     * has no Content-Length.
     *
     * Ignored: persistent HTTP connections are not supported yet
     * (see the class-level todo #306).
     *
     * @throws Exception If some problem inside
     */
    @Ignore
    @Test
    public void returnsProperResponseCodeOnNoContentLength() throws Exception {
        final ByteArrayOutputStream output = new ByteArrayOutputStream();
        final ServerSocket server = new ServerSocket(0);
        try {
            new Thread(
                new Runnable() {
                    @Override
                    public void run() {
                        try {
                            new BkBasic(new TkText("411 Test")).accept(
                                server.accept()
                            );
                        } catch (final IOException exception) {
                            throw new IllegalStateException(exception);
                        }
                    }
                }
            ).start();
            final Socket socket = new Socket(
                server.getInetAddress(),
                server.getLocalPort()
            );
            try {
                // POST body without a Content-Length header.
                socket.getOutputStream().write(
                    Joiner.on(BkBasicTest.CRLF).join(
                        BkBasicTest.POST,
                        BkBasicTest.HOST,
                        "",
                        "Hello World!"
                    ).getBytes()
                );
                final InputStream input = socket.getInputStream();
                // @checkstyle MagicNumber (1 line)
                final byte[] buffer = new byte[4096];
                for (int count = input.read(buffer); count != -1;
                    count = input.read(buffer)) {
                    output.write(buffer, 0, count);
                }
            } finally {
                socket.close();
            }
        } finally {
            server.close();
        }
        MatcherAssert.assertThat(
            output.toString(),
            Matchers.containsString("HTTP/1.1 411 Length Required")
        );
    }
    /**
     * BkBasic can accept no content-length on closed connection.
     *
     * Ignored: persistent HTTP connections are not supported yet
     * (see the class-level todo #306).
     *
     * @throws Exception If some problem inside
     */
    @Ignore
    @Test
    public void acceptsNoContentLengthOnClosedConnection() throws Exception {
        final String text = "Close Test";
        final ByteArrayOutputStream output = new ByteArrayOutputStream();
        final ServerSocket server = new ServerSocket(0);
        try {
            new Thread(
                new Runnable() {
                    @Override
                    public void run() {
                        try {
                            new BkBasic(new TkText(text)).accept(
                                server.accept()
                            );
                        } catch (final IOException exception) {
                            throw new IllegalStateException(exception);
                        }
                    }
                }
            ).start();
            final Socket socket = new Socket(
                server.getInetAddress(),
                server.getLocalPort()
            );
            try {
                // "Connection: Close" replaces Content-Length for framing here.
                socket.getOutputStream().write(
                    Joiner.on(BkBasicTest.CRLF).join(
                        BkBasicTest.POST,
                        BkBasicTest.HOST,
                        "Connection: Close",
                        "",
                        "Hello World!"
                    ).getBytes()
                );
                final InputStream input = socket.getInputStream();
                // @checkstyle MagicNumber (1 line)
                final byte[] buffer = new byte[4096];
                for (int count = input.read(buffer); count != -1;
                    count = input.read(buffer)) {
                    output.write(buffer, 0, count);
                }
            } finally {
                socket.close();
            }
        } finally {
            server.close();
        }
        MatcherAssert.assertThat(
            output.toString(),
            Matchers.containsString(text)
        );
    }
    /**
     * Creates Socket mock for reuse.
     *
     * The mock's input stream replays one canned GET request; its
     * addresses/ports are stubbed so BkBasic can build its X-Takes-* headers.
     *
     * @return Prepared Socket mock
     * @throws IOException If some problem inside
     */
    private static Socket createMockSocket() throws IOException {
        final Socket socket = Mockito.mock(Socket.class);
        Mockito.when(socket.getInputStream()).thenReturn(
            new ByteArrayInputStream(
                Joiner.on(BkBasicTest.CRLF).join(
                    "GET / HTTP/1.1",
                    "Host:localhost",
                    "Content-Length: 2",
                    "",
                    "hi"
                ).getBytes()
            )
        );
        Mockito.when(socket.getLocalAddress()).thenReturn(
            InetAddress.getLocalHost()
        );
        Mockito.when(socket.getLocalPort()).thenReturn(0);
        Mockito.when(socket.getInetAddress()).thenReturn(
            InetAddress.getLocalHost()
        );
        Mockito.when(socket.getPort()).thenReturn(0);
        final ByteArrayOutputStream baos = new ByteArrayOutputStream();
        Mockito.when(socket.getOutputStream()).thenReturn(baos);
        return socket;
    }
}
package de.ub0r.android.websms.connector.common;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.net.Uri;
/**
* @author flx
*/
public final class Log {
/** Tag for output. */
public static final String TAG = "WebSMS";
/** Packagename of SendLog. */
private static final String SENDLOG_PACKAGE_NAME = "org.l6n.sendlog";
/** Classname of SendLog. */
// private static final String SENDLOG_CLASS_NAME = ".SendLog";
/** Priority constant for the println method. */
public static final int ASSERT = android.util.Log.ASSERT;
/** Priority constant for the println method; use Log.d. */
public static final int DEBUG = android.util.Log.DEBUG;
/** Priority constant for the println method; use Log.e. */
public static final int ERROR = android.util.Log.ERROR;
/** Priority constant for the println method; use Log.i. */
public static final int INFO = android.util.Log.INFO;
/** Priority constant for the println method; use Log.v. */
public static final int VERBOSE = android.util.Log.VERBOSE;
/** Priority constant for the println method; use Log.w. */
public static final int WARN = android.util.Log.WARN;
/**
* Fire a given {@link Intent}.
*
* @author flx
*/
private static class FireIntent implements DialogInterface.OnClickListener {
/** {@link Activity}. */
private final Activity a;
/** {@link Intent}. */
private final Intent i;
/**
* Default Constructor.
*
* @param activity
* {@link Activity}
* @param intent
* {@link Intent}
*/
public FireIntent(final Activity activity, final Intent intent) {
this.a = activity;
this.i = intent;
}
/**
* {@inheritDoc}
*/
public void onClick(final DialogInterface dialog,
final int whichButton) {
this.a.startActivity(this.i);
}
}
/**
* Default Constructor.
*/
private Log() {
}
/**
* Send a DEBUG log message.
*
* @param tag
* Used to identify the source of a log message. It usually
* identifies the class or activity where the log call occurs.
* @param msg
* The message you would like logged.
*/
public static void d(final String tag, final String msg) {
android.util.Log.d(TAG, tag + ": " + msg);
}
/**
* Send a DEBUG log message and log the exception.
*
* @param tag
* Used to identify the source of a log message. It usually
* identifies the class or activity where the log call occurs.
* @param msg
* The message you would like logged.
* @param tr
* An exception to log.
*/
public static void d(final String tag, final String msg,
final Throwable tr) {
android.util.Log.d(TAG, tag + ": " + msg, tr);
}
/**
* Send a ERROR log message.
*
* @param tag
* Used to identify the source of a log message. It usually
* identifies the class or activity where the log call occurs.
* @param msg
* The message you would like logged.
*/
public static void e(final String tag, final String msg) {
android.util.Log.e(TAG, tag + ": " + msg);
}
/**
* Send a ERROR log message and log the exception.
*
* @param tag
* Used to identify the source of a log message. It usually
* identifies the class or activity where the log call occurs.
* @param msg
* The message you would like logged.
* @param tr
* An exception to log.
*/
public static void e(final String tag, final String msg,
final Throwable tr) {
android.util.Log.e(TAG, tag + ": " + msg, tr);
}
/**
* Send a INFO log message.
*
* @param tag
* Used to identify the source of a log message. It usually
* identifies the class or activity where the log call occurs.
* @param msg
* The message you would like logged.
*/
public static void i(final String tag, final String msg) {
android.util.Log.i(TAG, tag + ": " + msg);
}
/**
* Send a INFO log message and log the exception.
*
* @param tag
* Used to identify the source of a log message. It usually
* identifies the class or activity where the log call occurs.
* @param msg
* The message you would like logged.
* @param tr
* An exception to log.
*/
public static void i(final String tag, final String msg,
final Throwable tr) {
android.util.Log.i(TAG, tag + ": " + msg, tr);
}
/**
* Send a VERBOSE log message.
*
* @param tag
* Used to identify the source of a log message. It usually
* identifies the class or activity where the log call occurs.
* @param msg
* The message you would like logged.
*/
public static void v(final String tag, final String msg) {
android.util.Log.v(TAG, tag + ": " + msg);
}
/**
* Send a VERBOSE log message and log the exception.
*
* @param tag
* Used to identify the source of a log message. It usually
* identifies the class or activity where the log call occurs.
* @param msg
* The message you would like logged.
* @param tr
* An exception to log.
*/
public static void v(final String tag, final String msg,
final Throwable tr) {
android.util.Log.v(TAG, tag + ": " + msg, tr);
}
/**
* Send a WARN log message.
*
* @param tag
* Used to identify the source of a log message. It usually
* identifies the class or activity where the log call occurs.
* @param msg
* The message you would like logged.
*/
public static void w(final String tag, final String msg) {
android.util.Log.w(TAG, tag + ": " + msg);
}
/**
* Send a WARN log message and log the exception.
*
* @param tag
* Used to identify the source of a log message. It usually
* identifies the class or activity where the log call occurs.
* @param msg
* The message you would like logged.
* @param tr
* An exception to log.
*/
public static void w(final String tag, final String msg,
final Throwable tr) {
android.util.Log.w(TAG, tag + ": " + msg, tr);
}
/**
* Collect and send Log.
*
* @param activity
* {@link Activity}.
*/
public static void collectAndSendLog(final Activity activity) {
final PackageManager packageManager = activity.getPackageManager();
Intent intent = packageManager
.getLaunchIntentForPackage(SENDLOG_PACKAGE_NAME);
int title, message;
if (intent == null) {
intent = new Intent(Intent.ACTION_VIEW, Uri
.parse("market://search?q=pname:" + SENDLOG_PACKAGE_NAME));
title = R.string.sendlog_install_;
message = R.string.sendlog_install;
} else {
intent.putExtra("filter", TAG + ":D *:W");
intent.setType("0||flx.yoo@gmail.com");
title = R.string.sendlog_run_;
message = R.string.sendlog_run;
}
intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
final AlertDialog.Builder b = new AlertDialog.Builder(activity);
b.setIcon(android.R.drawable.ic_dialog_info);
b.setTitle(title);
b.setMessage(message);
b.setPositiveButton(android.R.string.ok, new FireIntent(activity,
intent));
b.setNegativeButton(android.R.string.cancel, null);
b.show();
}
} |
package com.hubspot.singularity.scheduler;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import javax.inject.Singleton;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Optional;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.inject.Inject;
import com.hubspot.deploy.HealthcheckOptions;
import com.hubspot.mesos.JavaUtils;
import com.hubspot.singularity.ExtendedTaskState;
import com.hubspot.singularity.SingularityAction;
import com.hubspot.singularity.SingularityDeploy;
import com.hubspot.singularity.SingularityDeployFailure;
import com.hubspot.singularity.SingularityDeployFailureReason;
import com.hubspot.singularity.SingularityRequest;
import com.hubspot.singularity.SingularityRequestHistory;
import com.hubspot.singularity.SingularityTask;
import com.hubspot.singularity.SingularityTaskHealthcheckResult;
import com.hubspot.singularity.SingularityTaskHistoryUpdate;
import com.hubspot.singularity.SingularityTaskHistoryUpdate.SimplifiedTaskState;
import com.hubspot.singularity.SingularityTaskId;
import com.hubspot.singularity.config.SingularityConfiguration;
import com.hubspot.singularity.data.DisasterManager;
import com.hubspot.singularity.data.RequestManager;
import com.hubspot.singularity.data.TaskManager;
@Singleton
public class SingularityDeployHealthHelper {
  private static final Logger LOG = LoggerFactory.getLogger(SingularityDeployHealthHelper.class);

  // Collaborators injected by Guice; all read-only after construction.
  private final TaskManager taskManager;
  private final SingularityConfiguration configuration;
  private final RequestManager requestManager;
  private final DisasterManager disasterManager;

  /**
   * Creates the helper with its persistence managers and configuration.
   */
  @Inject
  public SingularityDeployHealthHelper(TaskManager taskManager, SingularityConfiguration configuration, RequestManager requestManager, DisasterManager disasterManager) {
    this.taskManager = taskManager;
    this.configuration = configuration;
    this.requestManager = requestManager;
    this.disasterManager = disasterManager;
  }
public enum DeployHealth {
WAITING, UNHEALTHY, HEALTHY;
}
private boolean shouldCheckHealthchecks(final SingularityRequest request, final Optional<SingularityDeploy> deploy, final Collection<SingularityTaskId> activeTasks, final boolean isDeployPending) {
if (disasterManager.isDisabled(SingularityAction.RUN_HEALTH_CHECKS)) {
return false;
}
if (!deploy.isPresent()) {
return false;
}
if (!deploy.get().getHealthcheck().isPresent()) {
return false;
}
if (isDeployPending && deploy.get().getSkipHealthchecksOnDeploy().or(false)) {
return false;
}
if (request.getSkipHealthchecks().or(Boolean.FALSE)) {
return false;
}
for (SingularityTask task : taskManager.getTasks(activeTasks).values()) {
if (task.getTaskRequest().getPendingTask().getSkipHealthchecks().or(Boolean.FALSE)) {
return false;
}
}
return true;
}
public DeployHealth getDeployHealth(final SingularityRequest request, final Optional<SingularityDeploy> deploy, final Collection<SingularityTaskId> activeTasks, final boolean isDeployPending) {
if (shouldCheckHealthchecks(request, deploy, activeTasks, isDeployPending)) {
return getHealthcheckDeployState(deploy.get(), activeTasks, isDeployPending);
} else {
return getNoHealthcheckDeployHealth(deploy, activeTasks);
}
}
public List<SingularityTaskId> getHealthyTasks(final SingularityRequest request, final Optional<SingularityDeploy> deploy, final Collection<SingularityTaskId> activeTasks, final boolean isDeployPending) {
if (shouldCheckHealthchecks(request, deploy, activeTasks, isDeployPending)) {
return getHealthcheckedHealthyTasks(deploy.get(), activeTasks, isDeployPending);
} else {
return getNoHealthcheckHealthyTasks(deploy, activeTasks);
}
}
private DeployHealth getNoHealthcheckDeployHealth(final Optional<SingularityDeploy> deploy, final Collection<SingularityTaskId> matchingActiveTasks) {
final Map<SingularityTaskId, List<SingularityTaskHistoryUpdate>> taskUpdates = taskManager.getTaskHistoryUpdates(matchingActiveTasks);
for (SingularityTaskId taskId : matchingActiveTasks) {
Collection<SingularityTaskHistoryUpdate> updates = taskUpdates.get(taskId);
SimplifiedTaskState currentState = SingularityTaskHistoryUpdate.getCurrentState(updates);
switch (currentState) {
case UNKNOWN:
case WAITING:
return DeployHealth.WAITING;
case DONE:
LOG.warn("Unexpectedly found an active task ({}) in done state: {}}", taskId, updates);
return DeployHealth.UNHEALTHY;
case RUNNING:
if (!isRunningTaskHealthy(deploy, updates, taskId)) {
return DeployHealth.WAITING;
}
}
}
return DeployHealth.HEALTHY;
}
private List<SingularityTaskId> getNoHealthcheckHealthyTasks(final Optional<SingularityDeploy> deploy, final Collection<SingularityTaskId> matchingActiveTasks) {
final Map<SingularityTaskId, List<SingularityTaskHistoryUpdate>> taskUpdates = taskManager.getTaskHistoryUpdates(matchingActiveTasks);
final List<SingularityTaskId> healthyTaskIds = Lists.newArrayListWithCapacity(matchingActiveTasks.size());
for (SingularityTaskId taskId : matchingActiveTasks) {
Collection<SingularityTaskHistoryUpdate> updates = taskUpdates.get(taskId);
SimplifiedTaskState currentState = SingularityTaskHistoryUpdate.getCurrentState(updates);
if (currentState == SimplifiedTaskState.RUNNING && isRunningTaskHealthy(deploy, updates, taskId)) {
healthyTaskIds.add(taskId);
}
}
return healthyTaskIds;
}
  /**
   * Returns true once a RUNNING task has been running longer than the
   * configured threshold (deploy-level override, else global config).
   * A threshold below 1 disables the wait entirely.
   */
  private boolean isRunningTaskHealthy(final Optional<SingularityDeploy> deploy, Collection<SingularityTaskHistoryUpdate> updates, SingularityTaskId taskId) {
    long runningThreshold = configuration.getConsiderTaskHealthyAfterRunningForSeconds();
    if (deploy.isPresent()) {
      runningThreshold = deploy.get().getConsiderHealthyAfterRunningForSeconds().or(runningThreshold);
    }
    if (runningThreshold < 1) {
      return true;
    }
    Optional<SingularityTaskHistoryUpdate> runningUpdate = SingularityTaskHistoryUpdate.getUpdate(updates, ExtendedTaskState.TASK_RUNNING);
    // NOTE(review): unchecked Optional.get() — presumably callers only reach
    // here when the task's current state is RUNNING, so a TASK_RUNNING update
    // must exist; confirm, otherwise this can throw.
    long taskDuration = System.currentTimeMillis() - runningUpdate.get().getTimestamp();
    long runningThresholdMillis = TimeUnit.SECONDS.toMillis(runningThreshold);
    if (taskDuration < runningThresholdMillis) {
      LOG.debug("Task {} has been running for {}, has not yet reached running threshold of {}", taskId, JavaUtils.durationFromMillis(taskDuration), JavaUtils.durationFromMillis(runningThresholdMillis));
      return false;
    }
    return true;
  }
private DeployHealth getHealthcheckDeployState(final SingularityDeploy deploy, final Collection<SingularityTaskId> matchingActiveTasks, final boolean isDeployPending) {
Map<SingularityTaskId, SingularityTaskHealthcheckResult> healthcheckResults = taskManager.getLastHealthcheck(matchingActiveTasks);
List<SingularityRequestHistory> requestHistories = requestManager.getRequestHistory(deploy.getRequestId());
for (SingularityTaskId taskId : matchingActiveTasks) {
DeployHealth individualTaskHealth;
if (healthchecksSkipped(taskId, requestHistories, deploy)) {
LOG.trace("Detected skipped healthchecks for {}", taskId);
individualTaskHealth = DeployHealth.HEALTHY;
} else {
individualTaskHealth = getTaskHealth(deploy, isDeployPending, Optional.fromNullable(healthcheckResults.get(taskId)), taskId);
}
if (individualTaskHealth != DeployHealth.HEALTHY) {
return individualTaskHealth;
}
}
return DeployHealth.HEALTHY;
}
private List<SingularityTaskId> getHealthcheckedHealthyTasks(final SingularityDeploy deploy, final Collection<SingularityTaskId> matchingActiveTasks, final boolean isDeployPending) {
final Map<SingularityTaskId, SingularityTaskHealthcheckResult> healthcheckResults = taskManager.getLastHealthcheck(matchingActiveTasks);
final List<SingularityTaskId> healthyTaskIds = Lists.newArrayListWithCapacity(matchingActiveTasks.size());
List<SingularityRequestHistory> requestHistories = requestManager.getRequestHistory(deploy.getRequestId());
for (SingularityTaskId taskId : matchingActiveTasks) {
DeployHealth individualTaskHealth;
if (healthchecksSkipped(taskId, requestHistories, deploy)) {
LOG.trace("Detected skipped healthchecks for {}", taskId);
individualTaskHealth = DeployHealth.HEALTHY;
} else {
individualTaskHealth = getTaskHealth(deploy, isDeployPending, Optional.fromNullable(healthcheckResults.get(taskId)), taskId);
}
if (individualTaskHealth == DeployHealth.HEALTHY) {
healthyTaskIds.add(taskId);
}
}
return healthyTaskIds;
}
  /**
   * Determines whether healthchecks were explicitly skipped for this task:
   * at the deploy level, at the pending-task level, or by the request
   * revision that was current when the task entered RUNNING.
   */
  private boolean healthchecksSkipped(SingularityTaskId taskId, List<SingularityRequestHistory> requestHistories, SingularityDeploy deploy) {
    if (deploy.getSkipHealthchecksOnDeploy().or(false)) {
      return true;
    }
    Optional<SingularityTask> maybeTask = taskManager.getTask(taskId);
    if (maybeTask.isPresent()) {
      if (maybeTask.get().getTaskRequest().getPendingTask().getSkipHealthchecks().or(false)) {
        return true;
      }
      Optional<Long> runningStartTime = getRunningAt(taskManager.getTaskHistoryUpdates(taskId));
      if (runningStartTime.isPresent()) {
        // Find the most recent request-history entry created BEFORE the task
        // started running: that revision's skipHealthchecks flag applies.
        Optional<SingularityRequestHistory> previousHistory = Optional.absent();
        for (SingularityRequestHistory history : requestHistories) {
          if (history.getCreatedAt() < runningStartTime.get() && (!previousHistory.isPresent() || previousHistory.get().getCreatedAt() < history.getCreatedAt())) {
            previousHistory = Optional.of(history);
          }
        }
        if (previousHistory.isPresent() && previousHistory.get().getRequest().getSkipHealthchecks().or(false)) {
          return true;
        }
      }
    }
    return false;
  }
public DeployHealth getTaskHealth(SingularityDeploy deploy, boolean isDeployPending, Optional<SingularityTaskHealthcheckResult> healthcheckResult, SingularityTaskId taskId) {
Optional<SingularityTask> task = taskManager.getTask(taskId);
if (task.isPresent()) {
if (task.get().getTaskRequest().getRequest().getSkipHealthchecks().or(false)) {
LOG.debug("Healthcheck skipped for {}", taskId);
return DeployHealth.HEALTHY;
}
}
LOG.debug("deploy healthhcheck: {}", deploy.getHealthcheck());
if (deploy.getHealthcheck().isPresent() && deploy.getHealthcheck().get().getHealthcheckResultFilePath().isPresent()) {
LOG.debug("ihstory update: {}", taskManager.getTaskHistoryUpdate(taskId, ExtendedTaskState.TASK_RUNNING));
if (taskManager.getTaskHistoryUpdate(taskId, ExtendedTaskState.TASK_RUNNING).isPresent()) {
LOG.debug("Task {} has non-web healthcheck and is in running state, marking healthy.", taskId);
return DeployHealth.HEALTHY;
}
}
if (!healthcheckResult.isPresent()) {
LOG.debug("No healthcheck present for {}", taskId);
return DeployHealth.WAITING;
} else if (healthcheckResult.get().isFailed()) {
LOG.debug("Found a failed healthcheck: {}", healthcheckResult);
if (deploy.getHealthcheck().isPresent() && healthcheckResult.get().getStatusCode().isPresent()
&& deploy.getHealthcheck().get().getFailureStatusCodes().or(configuration.getHealthcheckFailureStatusCodes()).contains(healthcheckResult.get().getStatusCode().get())) {
LOG.debug("Failed healthcheck had bad status code: {}", healthcheckResult.get().getStatusCode().get());
return DeployHealth.UNHEALTHY;
}
final int startupTimeout = deploy.getHealthcheck().isPresent() ? deploy.getHealthcheck().get().getStartupTimeoutSeconds().or(configuration.getStartupTimeoutSeconds()) : configuration.getStartupTimeoutSeconds();
Collection<SingularityTaskHistoryUpdate> updates = taskManager.getTaskHistoryUpdates(taskId);
Optional<Long> runningAt = getRunningAt(updates);
if (runningAt.isPresent()) {
final long durationSinceRunning = System.currentTimeMillis() - runningAt.get();
if (healthcheckResult.get().isStartup() && durationSinceRunning > TimeUnit.SECONDS.toMillis(startupTimeout)) {
LOG.debug("{} has not responded to healthchecks in {}s", taskId, startupTimeout);
return DeployHealth.UNHEALTHY;
}
}
final Optional<Integer> healthcheckMaxRetries = deploy.getHealthcheck().isPresent() ? deploy.getHealthcheck().get().getMaxRetries().or(configuration.getHealthcheckMaxRetries()) : Optional.<Integer>absent();
if (healthcheckMaxRetries.isPresent() && taskManager.getNumNonstartupHealthchecks(taskId) > healthcheckMaxRetries.get()) {
LOG.debug("{} failed {} healthchecks, the max for the deploy", taskId, healthcheckMaxRetries.get());
return DeployHealth.UNHEALTHY;
}
final Optional<Integer> healthcheckMaxTotalTimeoutSeconds = deploy.getHealthcheck().isPresent() ? Optional.of(getMaxHealthcheckTimeoutSeconds(deploy.getHealthcheck().get())) : Optional.<Integer>absent();
if (isDeployPending && healthcheckMaxTotalTimeoutSeconds.isPresent()) {
if (runningAt.isPresent()) {
final long durationSinceRunning = System.currentTimeMillis() - runningAt.get();
if (durationSinceRunning > TimeUnit.SECONDS.toMillis(healthcheckMaxTotalTimeoutSeconds.get())) {
LOG.debug("{} has been running for {} and has yet to pass healthchecks, failing deploy", taskId, JavaUtils.durationFromMillis(durationSinceRunning));
return DeployHealth.UNHEALTHY;
}
}
}
return DeployHealth.WAITING;
}
return DeployHealth.HEALTHY;
}
public int getMaxHealthcheckTimeoutSeconds(HealthcheckOptions options) {
int intervalSeconds = options.getIntervalSeconds().or(configuration.getHealthcheckIntervalSeconds());
int responseTimeSeconds = options.getResponseTimeoutSeconds().or(configuration.getHealthcheckTimeoutSeconds());
int startupTime = options.getStartupTimeoutSeconds().or(configuration.getStartupTimeoutSeconds());
int attempts = options.getMaxRetries().or(configuration.getHealthcheckMaxRetries()).or(0) + 1;
return startupTime + ((intervalSeconds + responseTimeSeconds) * attempts);
}
  /**
   * Collects a deploy-failure record for each active task that has failed,
   * distinguishing healthchecked from non-healthchecked tasks.
   *
   * NOTE(review): deploy.get() is called unchecked — presumably callers only
   * invoke this with a present deploy; confirm, otherwise this can throw.
   */
  public List<SingularityDeployFailure> getTaskFailures(final Optional<SingularityDeploy> deploy, final Collection<SingularityTaskId> activeTasks) {
    List<SingularityDeployFailure> failures = new ArrayList<>();
    Map<SingularityTaskId, List<SingularityTaskHistoryUpdate>> taskUpdates = taskManager.getTaskHistoryUpdates(activeTasks);
    Map<SingularityTaskId, SingularityTaskHealthcheckResult> healthcheckResults = taskManager.getLastHealthcheck(activeTasks);
    for (SingularityTaskId taskId : activeTasks) {
      Optional<SingularityDeployFailure> maybeFailure = getTaskFailure(deploy.get(), taskUpdates, healthcheckResults, taskId);
      if (maybeFailure.isPresent()) {
        failures.add(maybeFailure.get());
      }
    }
    return failures;
  }
private Optional<SingularityDeployFailure> getTaskFailure(SingularityDeploy deploy, Map<SingularityTaskId, List<SingularityTaskHistoryUpdate>> taskUpdates,
Map<SingularityTaskId, SingularityTaskHealthcheckResult> healthcheckResults, SingularityTaskId taskId) {
SingularityTaskHealthcheckResult healthcheckResult = healthcheckResults.get(taskId);
Optional<SingularityDeployFailure> maybeFailure;
if (healthcheckResult == null) {
maybeFailure = getNonHealthcheckedTaskFailure(taskUpdates, taskId);
} else {
maybeFailure = getHealthcheckedTaskFailure(deploy, taskUpdates, healthcheckResult, taskId);
}
return maybeFailure;
}
/**
 * Determines whether a task that has at least one recorded healthcheck result
 * should count as a deploy failure.
 *
 * Evaluated in order: a non-failed healthcheck is never a failure; a task in a
 * terminal state failed either by finishing unexpectedly or on startup; an
 * exhausted retry budget fails the healthchecks; finally, a task still running
 * past its startup window or overall threshold fails the healthchecks.
 *
 * @param deploy            the deploy (source of healthcheck/timeout config)
 * @param taskUpdates       history updates per task id
 * @param healthcheckResult the most recent healthcheck result for this task
 * @param taskId            the task under evaluation
 * @return a failure reason, or absent if the task is not (yet) considered failed
 */
private Optional<SingularityDeployFailure> getHealthcheckedTaskFailure(SingularityDeploy deploy, Map<SingularityTaskId, List<SingularityTaskHistoryUpdate>> taskUpdates,
SingularityTaskHealthcheckResult healthcheckResult, SingularityTaskId taskId) {
Collection<SingularityTaskHistoryUpdate> updates = taskUpdates.get(taskId);
// A healthcheck that has not failed (passing or still pending) is never a failure.
if (!healthcheckResult.isFailed()) {
return Optional.absent();
}
SingularityTaskHistoryUpdate lastUpdate = Iterables.getLast(updates);
if (lastUpdate.getTaskState().isDone()) {
// Terminal state: even a "successful" exit is a failure here, because the task
// was expected to keep running; any other terminal state is a startup failure.
if (lastUpdate.getTaskState().isSuccess()) {
return Optional.of(new SingularityDeployFailure(SingularityDeployFailureReason.TASK_EXPECTED_RUNNING_FINISHED, Optional.of(taskId),
Optional.of(String.format("Task was expected to maintain TASK_RUNNING state but finished. (%s)", lastUpdate.getStatusMessage().or("")))));
} else {
return Optional.of(new SingularityDeployFailure(SingularityDeployFailureReason.TASK_FAILED_ON_STARTUP, Optional.of(taskId), lastUpdate.getStatusMessage()));
}
}
// Deploy-level retry limit takes precedence over the global configuration default.
final Optional<Integer> healthcheckMaxRetries = deploy.getHealthcheck().isPresent() ?
deploy.getHealthcheck().get().getMaxRetries().or(configuration.getHealthcheckMaxRetries()) : configuration.getHealthcheckMaxRetries();
// Only non-startup checks count against the retry budget.
if (healthcheckMaxRetries.isPresent() && taskManager.getNumNonstartupHealthchecks(taskId) > healthcheckMaxRetries.get()) {
String message = String.format("Instance %s failed %s healthchecks, the max for the deploy.", taskId.getInstanceNo(), healthcheckMaxRetries.get() + 1);
if (healthcheckResult.getStatusCode().isPresent()) {
message = String.format("%s Last check returned with status code %s", message, healthcheckResult.getStatusCode().get());
}
return Optional.of(new SingularityDeployFailure(SingularityDeployFailureReason.TASK_FAILED_HEALTH_CHECKS, Optional.of(taskId), Optional.of(message)));
}
Optional<Long> runningAt = getRunningAt(updates);
if (runningAt.isPresent()) {
final long durationSinceRunning = System.currentTimeMillis() - runningAt.get();
// NOTE(review): durationSinceRunning is in milliseconds but startupTimeoutSeconds
// appears to be in seconds (isRunningLongerThanThreshold converts via
// TimeUnit.SECONDS.toMillis, this branch does not) — confirm intended.
if (healthcheckResult.isStartup() && deploy.getHealthcheck().isPresent() && durationSinceRunning > deploy.getHealthcheck().get().getStartupTimeoutSeconds()
.or(configuration.getStartupTimeoutSeconds())) {
String message = String.format("Instance %s has not responded to healthchecks after running for %s", taskId.getInstanceNo(), JavaUtils.durationFromMillis(durationSinceRunning));
return Optional.of(new SingularityDeployFailure(SingularityDeployFailureReason.TASK_FAILED_HEALTH_CHECKS, Optional.of(taskId), Optional.of(message)));
}
// Running past the overall healthy-by threshold also fails the healthchecks.
if (isRunningLongerThanThreshold(deploy, durationSinceRunning)) {
String message = String.format("Instance %s has been running for %s and has yet to pass healthchecks.", taskId.getInstanceNo(), JavaUtils.durationFromMillis(durationSinceRunning));
if (healthcheckResult.getStatusCode().isPresent()) {
message = String.format("%s Last check returned with status code %s", message, healthcheckResult.getStatusCode().get());
}
return Optional.of(new SingularityDeployFailure(SingularityDeployFailureReason.TASK_FAILED_HEALTH_CHECKS, Optional.of(taskId), Optional.of(message)));
}
}
return Optional.absent();
}
/**
 * Whether the task has been running longer than its allowed window.
 * With a healthcheck configured the limit is the worst-case healthcheck
 * window; otherwise it is the deploy's overall healthy-by timeout.
 *
 * @param durationSinceRunning elapsed time since TASK_RUNNING, in milliseconds
 */
private boolean isRunningLongerThanThreshold(SingularityDeploy deploy, long durationSinceRunning) {
  final long thresholdSeconds;
  if (deploy.getHealthcheck().isPresent()) {
    thresholdSeconds = getMaxHealthcheckTimeoutSeconds(deploy.getHealthcheck().get());
  } else {
    thresholdSeconds = deploy.getDeployHealthTimeoutSeconds().or(configuration.getDeployHealthyBySeconds());
  }
  return durationSinceRunning > TimeUnit.SECONDS.toMillis(thresholdSeconds);
}
/**
 * Finds the timestamp of the first TASK_RUNNING update, if the task ever
 * entered the running state.
 *
 * @param updates task history updates, in recorded order
 * @return the first TASK_RUNNING timestamp, or absent if never running
 */
private Optional<Long> getRunningAt(Collection<SingularityTaskHistoryUpdate> updates) {
  for (SingularityTaskHistoryUpdate historyUpdate : updates) {
    if (ExtendedTaskState.TASK_RUNNING == historyUpdate.getTaskState()) {
      return Optional.of(historyUpdate.getTimestamp());
    }
  }
  return Optional.absent();
}
/**
 * Failure evaluation for a task with no healthcheck result, based purely on
 * its state history.
 *
 * Order matters: isSuccess() is checked before the broader isDone() so that a
 * task that exited cleanly (but was expected to keep running) is reported as
 * TASK_EXPECTED_RUNNING_FINISHED rather than TASK_FAILED_ON_STARTUP.
 *
 * @param taskUpdates history updates per task id
 * @param taskId      the task under evaluation
 * @return a failure reason, or absent if the task is not considered failed
 */
private Optional<SingularityDeployFailure> getNonHealthcheckedTaskFailure(Map<SingularityTaskId, List<SingularityTaskHistoryUpdate>> taskUpdates, SingularityTaskId taskId) {
List<SingularityTaskHistoryUpdate> updates = taskUpdates.get(taskId);
SingularityTaskHistoryUpdate lastUpdate = Iterables.getLast(updates);
if (lastUpdate.getTaskState().isSuccess()) {
return Optional.of(new SingularityDeployFailure(SingularityDeployFailureReason.TASK_EXPECTED_RUNNING_FINISHED, Optional.of(taskId),
Optional.of(String.format("Task was expected to maintain TASK_RUNNING state but finished. (%s)", lastUpdate.getStatusMessage().or("")))));
} else if (lastUpdate.getTaskState().isDone()) {
return Optional.of(new SingularityDeployFailure(SingularityDeployFailureReason.TASK_FAILED_ON_STARTUP, Optional.of(taskId), lastUpdate.getStatusMessage()));
} else if (SingularityTaskHistoryUpdate.getCurrentState(updates) == SimplifiedTaskState.WAITING) {
// Still waiting: the task never reached TASK_RUNNING at all.
return Optional.of(new SingularityDeployFailure(SingularityDeployFailureReason.TASK_NEVER_ENTERED_RUNNING, Optional.of(taskId),
Optional.of(String.format("Task never entered running state, last state was %s (%s)", lastUpdate.getTaskState().getDisplayName(), lastUpdate.getStatusMessage().or("")))));
}
return Optional.absent();
}
} |
package jasy;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.Field;
import java.util.Arrays;
import java.util.function.Function;
import java.util.function.Predicate;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.junit.Test;
import static org.junit.Assert.*;
import static jasy.Assertion.*;
import jasy.lang.ASMCompiler;
import jasy.lang.ASMCompiler.Message;
import jasy.lang.ClassResolver;
import jasy.lang.CommonClassMap;
import jasy.lang.CommonClassResolver;
import jasy.lang.ExhaustiveClassTransformer;
import jasy.lang.ast.ModuleAST;
import jasy.lang.ast.Transformation;
import java.io.ByteArrayInputStream;
import java.io.PrintWriter;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Hashtable;
import java.util.function.BiPredicate;
import java.util.stream.Collectors;
import org.objectweb.asm.ClassReader;
import org.objectweb.asm.ClassWriter;
import org.objectweb.asm.tree.ClassNode;
import org.objectweb.asm.util.CheckClassAdapter;
/**
*
* @author Jakob
*/
public class SourceToClassTest {
@Test
public void testAllClassesAdd1PublicPrimitiveField() throws IOException {
testSourceToClasses(
new String[]{"jasy.TestClass1"},
"class {+public float someField2;}",
forClass("jasy.TestClass1", chasFieldWhere(
fname(is("someField2"))
.and(ftype(is(float.class)))
.and(fmodifiers(isPublic()))
.and(fmodifiers(isStatic().negate()))
))
);
}
@Test
public void testAllClassesAdd1ProtectedPrimitiveField() throws IOException {
testSourceToClasses(
new String[]{"jasy.TestClass1"},
"class {+protected float someField2;}",
forClass("jasy.TestClass1", chasFieldWhere(
fname(is("someField2"))
.and(ftype(is(float.class)))
.and(fmodifiers(isProtected()))
.and(fmodifiers(isStatic().negate()))
))
);
}
@Test
public void testAllClassesAdd1PrivatePrimitiveField() throws IOException {
testSourceToClasses(
new String[]{"jasy.TestClass1"},
"class {+private float someField2;}",
forClass("jasy.TestClass1", chasFieldWhere(
fname(is("someField2"))
.and(ftype(is(float.class)))
.and(fmodifiers(isPrivate()))
.and(fmodifiers(isStatic().negate()))
))
);
}
@Test
public void testAllClassesAdd1PublicStaticPrimitiveField() throws IOException {
testSourceToClasses(
new String[]{"jasy.TestClass1"},
"class {+public static float someField2;}",
forClass("jasy.TestClass1", chasFieldWhere(
fname(is("someField2"))
.and(ftype(is(float.class)))
.and(fmodifiers(isPublic()))
.and(fmodifiers(isStatic()))
))
);
}
@Test
public void testAllClassesAdd1PublicObjectField() throws IOException {
testSourceToClasses(
new String[]{"jasy.TestClass1"},
"class {+public String someField2;}",
forClass("jasy.TestClass1", chasFieldWhere(
fname(is("someField2"))
.and(ftype(is(String.class)))
.and(fmodifiers(isPublic()))
.and(fmodifiers(isStatic().negate()))
))
);
}
@Test
public void testAllClassesAdd1PublicMethodReturningStringLiteral() throws IOException {
String expectedResult = "Hi";
testSourceToClasses(
new String[]{"jasy.TestClass1"},
"class {+public String toString() {return \"" + expectedResult + "\";}}",
forClass("jasy.TestClass1",
chasMethodWhere(
mname(is("toString"))
.and(rreturnType(is(String.class)))
.and(rmodifiers(isPublic()))
.and(rmodifiers(isStatic().negate()))
).and(
forInstance(imethod("toString", invocationResult(is(expectedResult))))
)
)
);
}
@Test
public void testAllClassesAdd1PublicMethodReturningGeneratedStringLiteral() throws IOException {
String expectedResult = "Hi";
testSourceToClasses(
new String[]{"jasy.TestClass1"},
"class {+public String toString() {return $\"" + expectedResult + "\";}}",
forClass("jasy.TestClass1",
chasMethodWhere(
mname(is("toString"))
.and(rreturnType(is(String.class)))
.and(rmodifiers(isPublic()))
.and(rmodifiers(isStatic().negate()))
).and(
forInstance(imethod("toString", invocationResult(is(expectedResult))))
)
)
);
}
@Test
public void testAllClassesAdd1PublicMethodReturningGeneratedStringConcatenation() throws IOException {
String str1 = "H";
String str2 = "i";
String expectedResult = str1 + str2;
String strConcSrc = "\"" + str1 + "\" + \"" + str2 + "\"";
testSourceToClasses(
new String[]{"jasy.TestClass1"},
"class {+public String toString() {return $" + strConcSrc + ";}}",
forClass("jasy.TestClass1",
chasMethodWhere(
mname(is("toString"))
.and(rreturnType(is(String.class)))
.and(rmodifiers(isPublic()))
.and(rmodifiers(isStatic().negate()))
).and(
forInstance(imethod("toString", invocationResult(is(expectedResult))))
)
)
);
}
@Test
public void testAllClassesAdd1PublicMethodReturningIntPlusInterpolatedInt() throws IOException {
int i1 = 5;
int i2 = 7;
int expectedResult = i1 + i2;
testSourceToClasses(
new String[]{"jasy.TestClass1"},
"class {+public int toInt() {return " + i1 + " + $" + i2 + ";}}",
forClass("jasy.TestClass1",
chasMethodWhere(
mname(is("toInt"))
.and(rreturnType(is(int.class)))
.and(rmodifiers(isPublic()))
.and(rmodifiers(isStatic().negate()))
).and(
forInstance(imethod("toInt", invocationResult(is(expectedResult))))
)
)
);
}
@Test
public void testAllClassesAdd1PublicMethodReturningStringConcatenation() throws IOException {
String str1 = "H";
String str2 = "i";
String expectedResult = str1 + str2;
testSourceToClasses(
new String[]{"jasy.TestClass1"},
"class {+public String toString() {return \"" + str1 + "\" + \"" + str2 + "\";}}",
forClass("jasy.TestClass1",
chasMethodWhere(
mname(is("toString"))
.and(rreturnType(is(String.class)))
.and(rmodifiers(isPublic()))
.and(rmodifiers(isStatic().negate()))
).and(
forInstance(imethod("toString", invocationResult(is(expectedResult))))
)
)
);
}
@Test
public void testAllClassesAdd1PublicMethodReturningIntPlusString() throws IOException {
int i1 = 5;
String str2 = "i";
String expectedResult = i1 + str2;
testSourceToClasses(
new String[]{"jasy.TestClass1"},
"class {+public String toString() {return " + i1 + " + \"" + str2 + "\";}}",
forClass("jasy.TestClass1",
chasMethodWhere(
mname(is("toString"))
.and(rreturnType(is(String.class)))
.and(rmodifiers(isPublic()))
.and(rmodifiers(isStatic().negate()))
).and(
forInstance(imethod("toString", invocationResult(is(expectedResult))))
)
)
);
}
@Test
public void testAllClassesAdd1PublicMethodReturningClassName() throws IOException {
String expectedResult = jasy.TestClass1.class.getName().replace(".", "/");
testSourceToClasses(
new String[]{"jasy.TestClass1"},
"c=class {+public String getClassName() {return $c.name;}}",
forClass("jasy.TestClass1",
chasMethodWhere(
mname(is("getClassName"))
.and(rreturnType(is(String.class)))
.and(rmodifiers(isPublic()))
.and(rmodifiers(isStatic().negate()))
).and(
forInstance(imethod("getClassName", invocationResult(is(expectedResult))))
)
)
);
}
@Test
public void testAllClassesAdd1PublicMethodReturningIntPlusIntPlusString() throws IOException {
int i1 = 1;
int i2 = 5;
String str3 = "i";
String expectedResult = i1 + i2 + str3;
testSourceToClasses(
new String[]{"jasy.TestClass1"},
"class {+public String toString() {return " + i1 + " + " + i2 + " + \"" + str3 + "\";}}",
forClass("jasy.TestClass1",
chasMethodWhere(
mname(is("toString"))
.and(rreturnType(is(String.class)))
.and(rmodifiers(isPublic()))
.and(rmodifiers(isStatic().negate()))
).and(
forInstance(imethod("toString", invocationResult(is(expectedResult))))
)
)
);
}
@Test
public void testAllClassesAdd1PublicMethodReturningIntPlusIntPlusStringPlusIntPlusInt() throws IOException {
int i1 = 1;
int i2 = 4;
String str3 = "i";
int i4 = 5;
int i5 = 7;
String expectedResult = i1 + i2 + str3 + i4 + i5;
testSourceToClasses(
new String[]{"jasy.TestClass1"},
"class {+public String toString() {return " + i1 + " + " + i2 + " + \"" + str3 + "\" + " + i4 + " + " + i5 + ";}}",
forClass("jasy.TestClass1",
chasMethodWhere(
mname(is("toString"))
.and(rreturnType(is(String.class)))
.and(rmodifiers(isPublic()))
.and(rmodifiers(isStatic().negate()))
).and(
forInstance(imethod("toString", invocationResult(is(expectedResult))))
)
)
);
}
@Test
public void testAllClassesAdd1PublicMethodReturningIntPlusInt() throws IOException {
int i1 = 1;
int i2 = 4;
int expectedResult = i1 + i2;
testSourceToClasses(
new String[]{"jasy.TestClass1"},
"class {+public int toInt() {return " + i1 + " + " + i2 + ";}}",
forClass("jasy.TestClass1",
chasMethodWhere(
mname(is("toInt"))
.and(rreturnType(is(int.class)))
.and(rmodifiers(isPublic()))
.and(rmodifiers(isStatic().negate()))
).and(
forInstance(imethod("toInt", invocationResult(is(expectedResult))))
)
)
);
}
@Test
public void testAllClassesAdd1StringFieldWithValue() throws IOException {
String str = "myValue";
int i = 7;
String expectedResult = str + i;
testSourceToClasses(
new String[]{"jasy.TestClass1"},
"class {+private String myField = \"" + str + "\" + " + i + ";}",
forClass("jasy.TestClass1",
chasFieldWhere(
fname(is("myField"))
.and(ftype(is(String.class)))
.and(fmodifiers(isPrivate()))
.and(fmodifiers(isStatic().negate()))
).and(
forInstance(ifield("myField", ifget(is(expectedResult))))
)
)
);
}
@Test
public void testAllClassesAdd1IntFieldWithValue() throws IOException {
int expectedResult = 7;
testSourceToClasses(
new String[]{"jasy.TestClass1"},
"class {+private int myField = " + expectedResult + ";}",
forClass("jasy.TestClass1",
chasFieldWhere(
fname(is("myField"))
.and(ftype(is(int.class)))
.and(fmodifiers(isPrivate()))
.and(fmodifiers(isStatic().negate()))
).and(
forInstance(ifield("myField", ifget(is(expectedResult))))
)
)
);
}
@Test
public void testAllClassesAdd1ShortFieldWithValue() throws IOException {
short expectedResult = 7;
testSourceToClasses(
new String[]{"jasy.TestClass1"},
"class {+private short myField = " + expectedResult + ";}",
forClass("jasy.TestClass1",
chasFieldWhere(
fname(is("myField"))
.and(ftype(is(short.class)))
.and(fmodifiers(isPrivate()))
.and(fmodifiers(isStatic().negate()))
).and(
forInstance(ifield("myField", ifget(is(expectedResult))))
)
)
);
}
@Test
public void testAllClassesAdd1LongFieldWithValue() throws IOException {
long expectedResult = 3000000000L;
testSourceToClasses(
new String[]{"jasy.TestClass1"},
"class {+private long myField = " + expectedResult + "L;}",
forClass("jasy.TestClass1",
chasFieldWhere(
fname(is("myField"))
.and(ftype(is(long.class)))
.and(fmodifiers(isPrivate()))
.and(fmodifiers(isStatic().negate()))
).and(
forInstance(ifield("myField", ifget(is(expectedResult))))
)
)
);
}
@Test
public void testAllClassesAddFieldReturnFieldIntMethod() throws IOException {
String myFieldName = "myField";
String myFieldValue = "Hi";
String expectedResult = myFieldName + "=" + myFieldValue;
testSourceToClasses(
new String[]{"jasy.TestClass1"},
"class {+private String " + myFieldName + " = \"" + myFieldValue + "\"; +public String toString() {return \"" + myFieldName+ "=\" + " + myFieldName + ";} }",
forClass("jasy.TestClass1",
chasFieldWhere(
fname(is("myField"))
.and(ftype(is(String.class)))
.and(fmodifiers(isPrivate()))
.and(fmodifiers(isStatic().negate()))
).and(
forInstance(imethod("toString", invocationResult(is(expectedResult))))
)
)
);
}
@Test
public void testAllClassesAddMethodReturnNameOfSingleField() throws IOException {
Field singleField = TestClass1.class.getDeclaredFields()[0];
String expectedResult = singleField.getName();
String src =
"class {\n" +
" fields=;\n" +
" \n" +
" +public String getDescription() {\n" +
" return $fields.get(0).name;\n" +
" }\n" +
"}\n";
testSourceToClasses(
new String[]{"jasy.TestClass1"},
src,
forClass("jasy.TestClass1",
forInstance(imethod("getDescription", invocationResult(is(expectedResult))))
)
);
}
@Test
public void testAllClassesAddMethodReturnValueOfMetaVariable() throws IOException {
int expectedResult = 5;
String src =
"class {\n" +
" +public int getValue() {\n" +
" return ${\n" +
" int i = " + expectedResult + ";\n" +
" return i;\n" +
" };\n" +
" }\n" +
"}\n";
testSourceToClasses(
new String[]{"jasy.TestClass1"},
src,
forClass("jasy.TestClass1",
forInstance(imethod("getValue", invocationResult(is(expectedResult))))
)
);
}
@Test
public void testAllClassesAddMethodReturnValueOfVariable() throws IOException {
int expectedResult = 5;
String src =
"class {\n" +
" +public int getValue() {\n" +
" int i = " + expectedResult + ";\n" +
" return i;\n" +
" }\n" +
"}\n";
testSourceToClasses(
new String[]{"jasy.TestClass1"},
src,
forClass("jasy.TestClass1",
forInstance(imethod("getValue", invocationResult(is(expectedResult))))
)
);
}
@Test
public void testAllClassesAddMethodReturnValueOfVariableWithSeparateDeclaration() throws IOException {
int expectedResult = 5;
String src =
"class {\n" +
" +public int getValue() {\n" +
" int i;\n" +
" i = " + expectedResult + ";\n" +
" return i;\n" +
" }\n" +
"}\n";
testSourceToClasses(
new String[]{"jasy.TestClass1"},
src,
forClass("jasy.TestClass1",
forInstance(imethod("getValue", invocationResult(is(expectedResult))))
)
);
}
@Test
public void testAllClassesAddMethodReturnValueOfSharedMetaVariable() throws IOException {
int expectedResult = 5;
String src =
"class {\n" +
" +public int getValue() ${\n" +
" int i = " + expectedResult + ";\n" +
" return #return $i;\n" +
" }\n" +
"}\n";
testSourceToClasses(
new String[]{"jasy.TestClass1"},
src,
forClass("jasy.TestClass1",
forInstance(imethod("getValue", invocationResult(is(expectedResult))))
)
);
}
@Test
public void testAllClassesAddMethodReturnSumOfVariable() throws IOException {
int i1 = 5;
int i2 = 7;
int expectedResult = i1 + i2;
String src =
"class {\n" +
" +public int getValue() {\n" +
" int i1 = " + i1 + ";\n" +
" int i2 = " + i2 + ";\n" +
" return i1 + i2;\n" +
" }\n" +
"}\n";
testSourceToClasses(
new String[]{"jasy.TestClass1"},
src,
forClass("jasy.TestClass1",
forInstance(imethod("getValue", invocationResult(is(expectedResult))))
)
);
}
private static Function<byte[], byte[]> transformClass(ClassResolver resolver, String source) {
ASMCompiler compiler = new ASMCompiler(resolver);
return bytes -> {
try {
ModuleAST module = compiler.compile(new ByteArrayInputStream(source.getBytes("UTF-8")));
ArrayList<Message> errorMessages = new ArrayList<>();
module.resolve(null, null, resolver, errorMessages);
if(errorMessages.size() > 0) {
String msg = errorMessages.stream().map(m -> m.toString()).collect(Collectors.joining("\n"));
throw new RuntimeException(msg);
} else {
Function<Transformation<ClassNode>, Runnable> classTransformer = module.toClassTransformer();
ExhaustiveClassTransformer eTransformer = new ExhaustiveClassTransformer(classTransformer);
byte[] newBytes = eTransformer.transform(bytes);
InputStream classStream = new ByteArrayInputStream(newBytes);
ClassReader classReader = new ClassReader(classStream);
// classReader.accept(new TraceClassVisitor(new PrintWriter(System.out)), 0);
CheckClassAdapter.verify(classReader, false, new PrintWriter(System.out));
// ClassWriter classWriter = new ClassWriter(ClassWriter.COMPUTE_MAXS);
// classReader.accept(classWriter, 0);
// Textifier asmifier = new Textifier();
// classWriter.
return newBytes;
}
} catch (IOException ex) {
Logger.getLogger(SourceToClassTest.class.getName()).log(Level.SEVERE, null, ex);
}
return bytes;
};
}
private static void testSourceToClasses(String[] classNames, String source, Predicate<Class<?>[]> assertion) throws IOException {
CommonClassMap classMap = new CommonClassMap();
for(String className: classNames)
classMap.addClassName(className);
classMap.addClassName("java.lang.String");
classMap.addClassName("java.lang.Object");
CommonClassResolver resolver = new CommonClassResolver(classMap);
resolver.importPackage("java.lang");
ClassLoader cl = new ProxyClassLoader(ifIn(classNames), classBytesFromName().andThen(transformClass(resolver, source)));
Class<?>[] classes = Arrays.asList(classNames).stream()
.map(className -> {
try {
return cl.loadClass(className);
// return Class.forName(source, true, cl);
} catch (ClassNotFoundException ex) {
Logger.getLogger(SourceToAstTest.class.getName()).log(Level.SEVERE, null, ex);
return null;
}
})
.toArray(size -> new Class<?>[size]);
try {
assertTrue(assertion.test(classes));
} catch(Error e) {
throw e;
}
// Read all classes
// Replace interusages with derived classes
// Derived classes using module
// Assert derived classes
// assertTrue(modulePredicate.test(module));
}
private static Predicate<Class<?>[]> forClass(String name, Predicate<Class<?>> predicate) {
return classes -> {
Class<?> c = Arrays.asList(classes).stream().filter(x -> x.getName().equals(name)).findFirst().get();
return predicate.test(c);
};
}
private static Predicate<Class<?>> chasFieldWhere(Predicate<Field> predicate) {
return c -> Arrays.asList(c.getDeclaredFields()).stream().anyMatch(predicate);
}
private static Predicate<Field> fname(Predicate<String> predicate) {
return f -> predicate.test(f.getName());
}
private static Predicate<Field> ftype(Predicate<Class<?>> predicate) {
return f -> predicate.test(f.getType());
}
private static Predicate<Field> fmodifiers(Predicate<Integer> predicate) {
return f -> predicate.test(f.getModifiers());
}
private static Predicate<Class<?>> chasMethodWhere(Predicate<Method> predicate) {
return c -> Arrays.asList(c.getDeclaredMethods()).stream().anyMatch(predicate);
}
private static Predicate<Class<?>> forInstance(Predicate<Object> predicate) {
return c -> {
try {
Object instance = c.newInstance();
return predicate.test(instance);
} catch (InstantiationException | IllegalAccessException ex) {
Logger.getLogger(SourceToClassTest.class.getName()).log(Level.SEVERE, null, ex);
}
return false;
};
}
private static Predicate<Object> imethod(String name, BiPredicate<Object, Method> predicate) {
return i -> {
try {
Method m = i.getClass().getDeclaredMethod(name);
return predicate.test(i, m);
} catch (NoSuchMethodException | SecurityException ex) {
Logger.getLogger(SourceToClassTest.class.getName()).log(Level.SEVERE, null, ex);
}
return false;
};
}
private static Predicate<Object> ifield(String name, BiPredicate<Object, Field> predicate) {
return i -> {
try {
Field m = i.getClass().getDeclaredField(name);
m.setAccessible(true);
return predicate.test(i, m);
} catch (NoSuchFieldException | SecurityException ex) {
Logger.getLogger(SourceToClassTest.class.getName()).log(Level.SEVERE, null, ex);
}
return false;
};
}
private static BiPredicate<Object, Method> invocationResult(Predicate<Object> predicate) {
return (i, m) -> {
try {
Object result = m.invoke(i);
return predicate.test(result);
} catch (IllegalAccessException | IllegalArgumentException | InvocationTargetException ex) {
Logger.getLogger(SourceToClassTest.class.getName()).log(Level.SEVERE, null, ex);
}
return false;
};
}
private static BiPredicate<Object, Field> ifget(Predicate<Object> predicate) {
return (i, f) -> {
try {
Object value = f.get(i);
return predicate.test(value);
} catch (IllegalArgumentException | IllegalAccessException ex) {
Logger.getLogger(SourceToClassTest.class.getName()).log(Level.SEVERE, null, ex);
}
return false;
};
}
private static Predicate<Integer> isPublic() {
return m -> Modifier.isPublic(m);
}
private static Predicate<Integer> isProtected() {
return m -> Modifier.isProtected(m);
}
private static Predicate<Integer> isPrivate() {
return m -> Modifier.isPrivate(m);
}
private static Predicate<Integer> isStatic() {
return m -> Modifier.isStatic(m);
}
private static Predicate<String> ifIn(String[] names) {
return name -> Arrays.asList(names).contains(name);
}
private static ThreadLocal<Hashtable<String, byte[]>> classBytesCacheMap = new ThreadLocal<Hashtable<String, byte[]>>() {
@Override
protected Hashtable<String, byte[]> initialValue() {
return new Hashtable<String, byte[]>();
}
};
private static Function<String, byte[]> classBytesFromName() {
return name -> {
try {
byte[] cacheBytesCache = classBytesCacheMap.get().get(name);
if(cacheBytesCache == null) {
String s = new java.io.File("build/test/classes/" + name.replace(".", "/") + ".class").getCanonicalFile().toString();
InputStream classStream = new FileInputStream("build/test/classes/" + name.replace(".", "/") + ".class"); //classUrl.openStream();
ClassReader classReader = new ClassReader(classStream);
ClassWriter classWriter = new ClassWriter(ClassWriter.COMPUTE_MAXS);
classReader.accept(classWriter, 0);
cacheBytesCache = classWriter.toByteArray();
classBytesCacheMap.get().put(name, cacheBytesCache);
}
return cacheBytesCache;
} catch (IOException ex) {
Logger.getLogger(SourceToAstTest.class.getName()).log(Level.SEVERE, null, ex);
return null;
}
};
}
private static Predicate<Method> mname(Predicate<String> predicate) {
return m -> predicate.test(m.getName());
}
private static Predicate<? super Method> rreturnType(Predicate<Class<?>> predicate) {
return m -> predicate.test(m.getReturnType());
}
private static Predicate<? super Method> rmodifiers(Predicate<Integer> predicate) {
return m -> predicate.test(m.getModifiers());
}
} |
package jasy;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.Field;
import java.util.Arrays;
import java.util.function.Function;
import java.util.function.Predicate;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.junit.Test;
import static org.junit.Assert.*;
import static jasy.Assertion.*;
import static jasy.TemplateSource.*;
import jasy.lang.ASMCompiler;
import jasy.lang.ASMCompiler.Message;
import jasy.lang.ClassResolver;
import jasy.lang.CommonClassMap;
import jasy.lang.CommonClassResolver;
import jasy.lang.ExhaustiveClassTransformer;
import jasy.lang.ast.ModuleAST;
import jasy.lang.ast.Reduction;
import jasy.lang.ast.Transformation;
import java.io.ByteArrayInputStream;
import java.io.PrintWriter;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.nio.file.Paths;
import java.util.AbstractMap;
import java.util.ArrayList;
import java.util.Hashtable;
import java.util.List;
import java.util.Map;
import java.util.function.BiPredicate;
import java.util.function.Consumer;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.objectweb.asm.ClassReader;
import org.objectweb.asm.ClassWriter;
import org.objectweb.asm.tree.ClassNode;
import org.objectweb.asm.util.CheckClassAdapter;
import org.objectweb.asm.util.TraceClassVisitor;
/**
*
* @author Jakob
*/
public class SourceToClassTest {
@Test
public void testAllClassesAdd1PublicPrimitiveField() throws IOException {
    // "+public float someField2;" must add a public, non-static float field.
    String[] targets = {"jasy.TestClass1"};
    Predicate<Field> fieldExpectation = fname(is("someField2"))
            .and(ftype(is(float.class)))
            .and(fmodifiers(isPublic()))
            .and(fmodifiers(isStatic().negate()));
    testSourceToClasses(targets, "class {+public float someField2;}",
            forClass("jasy.TestClass1", chasFieldWhere(fieldExpectation)));
}
@Test
public void testAllClassesAdd1ProtectedPrimitiveField() throws IOException {
    // "+protected float someField2;" must add a protected, non-static float field.
    String[] targets = {"jasy.TestClass1"};
    Predicate<Field> fieldExpectation = fname(is("someField2"))
            .and(ftype(is(float.class)))
            .and(fmodifiers(isProtected()))
            .and(fmodifiers(isStatic().negate()));
    testSourceToClasses(targets, "class {+protected float someField2;}",
            forClass("jasy.TestClass1", chasFieldWhere(fieldExpectation)));
}
@Test
public void testAllClassesAdd1PrivatePrimitiveField() throws IOException {
    // "+private float someField2;" must add a private, non-static float field.
    String[] targets = {"jasy.TestClass1"};
    Predicate<Field> fieldExpectation = fname(is("someField2"))
            .and(ftype(is(float.class)))
            .and(fmodifiers(isPrivate()))
            .and(fmodifiers(isStatic().negate()));
    testSourceToClasses(targets, "class {+private float someField2;}",
            forClass("jasy.TestClass1", chasFieldWhere(fieldExpectation)));
}
@Test
public void testAllClassesAdd1PublicStaticPrimitiveField() throws IOException {
    // "+public static float someField2;" must add a public static float field.
    String[] targets = {"jasy.TestClass1"};
    Predicate<Field> fieldExpectation = fname(is("someField2"))
            .and(ftype(is(float.class)))
            .and(fmodifiers(isPublic()))
            .and(fmodifiers(isStatic()));
    testSourceToClasses(targets, "class {+public static float someField2;}",
            forClass("jasy.TestClass1", chasFieldWhere(fieldExpectation)));
}
@Test
public void testAllClassesAdd1PublicObjectField() throws IOException {
    // "+public String someField2;" must add a public, non-static reference-typed field.
    String[] targets = {"jasy.TestClass1"};
    Predicate<Field> fieldExpectation = fname(is("someField2"))
            .and(ftype(is(String.class)))
            .and(fmodifiers(isPublic()))
            .and(fmodifiers(isStatic().negate()));
    testSourceToClasses(targets, "class {+public String someField2;}",
            forClass("jasy.TestClass1", chasFieldWhere(fieldExpectation)));
}
@Test
public void testAllClassesAdd1PublicMethodReturningStringLiteral() throws IOException {
    // A generated toString() returning a literal must exist structurally and
    // actually return that literal when invoked.
    String expectedResult = "Hi";
    String fixture = "class {+public String toString() {return \"" + expectedResult + "\";}}";
    Predicate<Method> methodShape = mname(is("toString"))
            .and(rreturnType(is(String.class)))
            .and(rmodifiers(isPublic()))
            .and(rmodifiers(isStatic().negate()));
    testSourceToClasses(new String[]{"jasy.TestClass1"}, fixture,
            forClass("jasy.TestClass1",
                    chasMethodWhere(methodShape)
                            .and(forInstance(imethod("toString", invocationResult(is(expectedResult)))))));
}
@Test
public void testAllClassesAdd1PublicMethodReturningGeneratedStringLiteral() throws IOException {
    // Same as the literal test, but the literal is produced via the $-generation syntax.
    String expectedResult = "Hi";
    String fixture = "class {+public String toString() {return $\"" + expectedResult + "\";}}";
    Predicate<Method> methodShape = mname(is("toString"))
            .and(rreturnType(is(String.class)))
            .and(rmodifiers(isPublic()))
            .and(rmodifiers(isStatic().negate()));
    testSourceToClasses(new String[]{"jasy.TestClass1"}, fixture,
            forClass("jasy.TestClass1",
                    chasMethodWhere(methodShape)
                            .and(forInstance(imethod("toString", invocationResult(is(expectedResult)))))));
}
@Test
public void testAllClassesAdd1PublicMethodReturningXLessThanY() throws IOException {
    // x < y: true for (8,9), false for (9,9) and (9,8).
    Class<?>[] signature = {int.class, int.class};
    testSourceToClasses(
            new String[]{"jasy.TestClass1"},
            "class {+public boolean compare(int x, int y) {return x < y;}}",
            forClass("jasy.TestClass1",
                    forInstance(imethod("compare", signature,
                            invocationResult(new Object[]{8, 9}, is(true))
                                    .and(invocationResult(new Object[]{9, 9}, is(false)))
                                    .and(invocationResult(new Object[]{9, 8}, is(false)))))));
}
@Test
public void testAllClassesAdd1PublicMethodReturningXLessThanOrEqualsY() throws IOException {
    // x <= y: true for (8,9) and (9,9), false for (9,8).
    Class<?>[] signature = {int.class, int.class};
    testSourceToClasses(
            new String[]{"jasy.TestClass1"},
            "class {+public boolean compare(int x, int y) {return x <= y;}}",
            forClass("jasy.TestClass1",
                    forInstance(imethod("compare", signature,
                            invocationResult(new Object[]{8, 9}, is(true))
                                    .and(invocationResult(new Object[]{9, 9}, is(true)))
                                    .and(invocationResult(new Object[]{9, 8}, is(false)))))));
}
@Test
public void testAllClassesAdd1PublicMethodReturningXGreaterThanY() throws IOException {
    // x > y: false for (8,9) and (9,9), true for (9,8).
    Class<?>[] signature = {int.class, int.class};
    testSourceToClasses(
            new String[]{"jasy.TestClass1"},
            "class {+public boolean compare(int x, int y) {return x > y;}}",
            forClass("jasy.TestClass1",
                    forInstance(imethod("compare", signature,
                            invocationResult(new Object[]{8, 9}, is(false))
                                    .and(invocationResult(new Object[]{9, 9}, is(false)))
                                    .and(invocationResult(new Object[]{9, 8}, is(true)))))));
}
@Test
public void testAllClassesAdd1PublicMethodReturningXGreaterThanOrEqualsY() throws IOException {
    // x >= y: false for (8,9), true for (9,9) and (9,8).
    Class<?>[] signature = {int.class, int.class};
    testSourceToClasses(
            new String[]{"jasy.TestClass1"},
            "class {+public boolean compare(int x, int y) {return x >= y;}}",
            forClass("jasy.TestClass1",
                    forInstance(imethod("compare", signature,
                            invocationResult(new Object[]{8, 9}, is(false))
                                    .and(invocationResult(new Object[]{9, 9}, is(true)))
                                    .and(invocationResult(new Object[]{9, 8}, is(true)))))));
}
/** Verifies a generated public instance method implementing {@code x == y}. */
@Test
public void testAllClassesAdd1PublicMethodReturningXEqualsY() throws IOException {
    String targetClass = "jasy.TestClass1";
    testSourceToClasses(
        new String[]{targetClass},
        "class {+public boolean compare(int x, int y) {return x == y;}}",
        forClass(targetClass,
            forInstance(imethod("compare", new Class<?>[]{int.class, int.class},
                invocationResult(new Object[]{8, 9}, is(false))
                    .and(invocationResult(new Object[]{9, 9}, is(true)))
                    .and(invocationResult(new Object[]{9, 8}, is(false)))))));
}
/** Verifies a generated public instance method implementing {@code x != y}. */
@Test
public void testAllClassesAdd1PublicMethodReturningXNotEqualsY() throws IOException {
    String targetClass = "jasy.TestClass1";
    testSourceToClasses(
        new String[]{targetClass},
        "class {+public boolean compare(int x, int y) {return x != y;}}",
        forClass(targetClass,
            forInstance(imethod("compare", new Class<?>[]{int.class, int.class},
                invocationResult(new Object[]{8, 9}, is(true))
                    .and(invocationResult(new Object[]{9, 9}, is(false)))
                    .and(invocationResult(new Object[]{9, 8}, is(true)))))));
}
/** Verifies string concatenation embedded via an interpolated ($-prefixed) source fragment. */
@Test
public void testAllClassesAdd1PublicMethodReturningGeneratedStringConcatenation() throws IOException {
    String left = "H";
    String right = "i";
    String expected = left + right;
    String concatenationSrc = "\"" + left + "\" + \"" + right + "\"";
    testSourceToClasses(
        new String[]{"jasy.TestClass1"},
        "class {+public String toString() {return $" + concatenationSrc + ";}}",
        forClass("jasy.TestClass1",
            chasMethodWhere(
                mname(is("toString"))
                    .and(rreturnType(is(String.class)))
                    .and(rmodifiers(isPublic()))
                    .and(rmodifiers(isStatic().negate())))
            .and(forInstance(imethod("toString", invocationResult(is(expected)))))));
}
/** Verifies addition of a literal int and an interpolated ($-prefixed) int. */
@Test
public void testAllClassesAdd1PublicMethodReturningIntPlusInterpolatedInt() throws IOException {
    int left = 5;
    int right = 7;
    int expected = left + right;
    testSourceToClasses(
        new String[]{"jasy.TestClass1"},
        "class {+public int toInt() {return " + left + " + $" + right + ";}}",
        forClass("jasy.TestClass1",
            chasMethodWhere(
                mname(is("toInt"))
                    .and(rreturnType(is(int.class)))
                    .and(rmodifiers(isPublic()))
                    .and(rmodifiers(isStatic().negate())))
            .and(forInstance(imethod("toInt", invocationResult(is(expected)))))));
}
/** Verifies plain string-literal concatenation in a generated method. */
@Test
public void testAllClassesAdd1PublicMethodReturningStringConcatenation() throws IOException {
    String left = "H";
    String right = "i";
    String expected = left + right;
    testSourceToClasses(
        new String[]{"jasy.TestClass1"},
        "class {+public String toString() {return \"" + left + "\" + \"" + right + "\";}}",
        forClass("jasy.TestClass1",
            chasMethodWhere(
                mname(is("toString"))
                    .and(rreturnType(is(String.class)))
                    .and(rmodifiers(isPublic()))
                    .and(rmodifiers(isStatic().negate())))
            .and(forInstance(imethod("toString", invocationResult(is(expected)))))));
}
/** Verifies int-to-String promotion in generated {@code int + String} concatenation. */
@Test
public void testAllClassesAdd1PublicMethodReturningIntPlusString() throws IOException {
    int number = 5;
    String text = "i";
    String expected = number + text;
    testSourceToClasses(
        new String[]{"jasy.TestClass1"},
        "class {+public String toString() {return " + number + " + \"" + text + "\";}}",
        forClass("jasy.TestClass1",
            chasMethodWhere(
                mname(is("toString"))
                    .and(rreturnType(is(String.class)))
                    .and(rmodifiers(isPublic()))
                    .and(rmodifiers(isStatic().negate())))
            .and(forInstance(imethod("toString", invocationResult(is(expected)))))));
}
/**
 * Verifies access to transform-time class metadata ({@code $c.name}); the jasy
 * class name uses the internal slash-separated form.
 */
@Test
public void testAllClassesAdd1PublicMethodReturningClassName() throws IOException {
    String expected = jasy.TestClass1.class.getName().replace(".", "/");
    testSourceToClasses(
        new String[]{"jasy.TestClass1"},
        "c=class {+public String getClassName() {return $c.name;}}",
        forClass("jasy.TestClass1",
            chasMethodWhere(
                mname(is("getClassName"))
                    .and(rreturnType(is(String.class)))
                    .and(rmodifiers(isPublic()))
                    .and(rmodifiers(isStatic().negate())))
            .and(forInstance(imethod("getClassName", invocationResult(is(expected)))))));
}
/** Verifies left-to-right evaluation in generated {@code int + int + String}. */
@Test
public void testAllClassesAdd1PublicMethodReturningIntPlusIntPlusString() throws IOException {
    int first = 1;
    int second = 5;
    String tail = "i";
    String expected = first + second + tail;
    testSourceToClasses(
        new String[]{"jasy.TestClass1"},
        "class {+public String toString() {return " + first + " + " + second + " + \"" + tail + "\";}}",
        forClass("jasy.TestClass1",
            chasMethodWhere(
                mname(is("toString"))
                    .and(rreturnType(is(String.class)))
                    .and(rmodifiers(isPublic()))
                    .and(rmodifiers(isStatic().negate())))
            .and(forInstance(imethod("toString", invocationResult(is(expected)))))));
}
/** Verifies mixed int/String concatenation with operands on both sides of the String. */
@Test
public void testAllClassesAdd1PublicMethodReturningIntPlusIntPlusStringPlusIntPlusInt() throws IOException {
    int first = 1;
    int second = 4;
    String middle = "i";
    int fourth = 5;
    int fifth = 7;
    String expected = first + second + middle + fourth + fifth;
    testSourceToClasses(
        new String[]{"jasy.TestClass1"},
        "class {+public String toString() {return " + first + " + " + second + " + \"" + middle + "\" + " + fourth + " + " + fifth + ";}}",
        forClass("jasy.TestClass1",
            chasMethodWhere(
                mname(is("toString"))
                    .and(rreturnType(is(String.class)))
                    .and(rmodifiers(isPublic()))
                    .and(rmodifiers(isStatic().negate())))
            .and(forInstance(imethod("toString", invocationResult(is(expected)))))));
}
/** Verifies plain int addition in a generated method. */
@Test
public void testAllClassesAdd1PublicMethodReturningIntPlusInt() throws IOException {
    int left = 1;
    int right = 4;
    int expected = left + right;
    testSourceToClasses(
        new String[]{"jasy.TestClass1"},
        "class {+public int toInt() {return " + left + " + " + right + ";}}",
        forClass("jasy.TestClass1",
            chasMethodWhere(
                mname(is("toInt"))
                    .and(rreturnType(is(int.class)))
                    .and(rmodifiers(isPublic()))
                    .and(rmodifiers(isStatic().negate())))
            .and(forInstance(imethod("toInt", invocationResult(is(expected)))))));
}
/** Verifies a generated private String field initialized with a concatenated value. */
@Test
public void testAllClassesAdd1StringFieldWithValue() throws IOException {
    String textPart = "myValue";
    int numberPart = 7;
    String expected = textPart + numberPart;
    testSourceToClasses(
        new String[]{"jasy.TestClass1"},
        "class {+private String myField = \"" + textPart + "\" + " + numberPart + ";}",
        forClass("jasy.TestClass1",
            chasFieldWhere(
                fname(is("myField"))
                    .and(ftype(is(String.class)))
                    .and(fmodifiers(isPrivate()))
                    .and(fmodifiers(isStatic().negate())))
            .and(forInstance(ifield("myField", ifget(is(expected)))))));
}
/** Verifies a generated private int field with a literal initializer. */
@Test
public void testAllClassesAdd1IntFieldWithValue() throws IOException {
    int expected = 7;
    testSourceToClasses(
        new String[]{"jasy.TestClass1"},
        "class {+private int myField = " + expected + ";}",
        forClass("jasy.TestClass1",
            chasFieldWhere(
                fname(is("myField"))
                    .and(ftype(is(int.class)))
                    .and(fmodifiers(isPrivate()))
                    .and(fmodifiers(isStatic().negate())))
            .and(forInstance(ifield("myField", ifget(is(expected)))))));
}
/** Verifies a generated private short field with a literal initializer. */
@Test
public void testAllClassesAdd1ShortFieldWithValue() throws IOException {
    short expected = 7;
    testSourceToClasses(
        new String[]{"jasy.TestClass1"},
        "class {+private short myField = " + expected + ";}",
        forClass("jasy.TestClass1",
            chasFieldWhere(
                fname(is("myField"))
                    .and(ftype(is(short.class)))
                    .and(fmodifiers(isPrivate()))
                    .and(fmodifiers(isStatic().negate())))
            .and(forInstance(ifield("myField", ifget(is(expected)))))));
}
/** Verifies a generated private long field; the value exceeds int range to force the L suffix. */
@Test
public void testAllClassesAdd1LongFieldWithValue() throws IOException {
    long expected = 3000000000L;
    testSourceToClasses(
        new String[]{"jasy.TestClass1"},
        "class {+private long myField = " + expected + "L;}",
        forClass("jasy.TestClass1",
            chasFieldWhere(
                fname(is("myField"))
                    .and(ftype(is(long.class)))
                    .and(fmodifiers(isPrivate()))
                    .and(fmodifiers(isStatic().negate())))
            .and(forInstance(ifield("myField", ifget(is(expected)))))));
}
/** Verifies a generated method can read a field generated by the same transformation. */
@Test
public void testAllClassesAddFieldReturnFieldIntMethod() throws IOException {
    String fieldName = "myField";
    String fieldValue = "Hi";
    String expected = fieldName + "=" + fieldValue;
    testSourceToClasses(
        new String[]{"jasy.TestClass1"},
        "class {+private String " + fieldName + " = \"" + fieldValue + "\"; +public String toString() {return \"" + fieldName + "=\" + " + fieldName + ";} }",
        forClass("jasy.TestClass1",
            chasFieldWhere(
                fname(is("myField"))
                    .and(ftype(is(String.class)))
                    .and(fmodifiers(isPrivate()))
                    .and(fmodifiers(isStatic().negate())))
            .and(forInstance(imethod("toString", invocationResult(is(expected)))))));
}
/** Verifies transform-time access to the target class's field list ({@code $fields}). */
@Test
public void testAllClassesAddMethodReturnNameOfSingleField() throws IOException {
    Field singleField = TestClass1.class.getDeclaredFields()[0];
    String expected = singleField.getName();
    String src =
        "class {\n" +
        " fields=;\n" +
        " \n" +
        " +public String getDescription() {\n" +
        " return $fields.get(0).name;\n" +
        " }\n" +
        "}\n";
    testSourceToClasses(
        new String[]{"jasy.TestClass1"},
        src,
        forClass("jasy.TestClass1",
            forInstance(imethod("getDescription", invocationResult(is(expected))))));
}
/** Verifies a generated method can instantiate a JDK class via {@code new}. */
@Test
public void testAllClassesAddMethodWithNew() throws IOException {
    String source =
        "class {\n" +
        " +public StringBuilder createStringBuilder() {\n" +
        " return new StringBuilder();\n" +
        " }\n" +
        "}\n";
    testSourceToClasses(
        new String[]{"jasy.TestClass1"},
        source,
        forClass("jasy.TestClass1",
            forInstance(imethod("createStringBuilder",
                invocationResult(instanceOf(StringBuilder.class))))));
}
/**
 * Verifies {@code +=} on an int local in a generated method.
 * Fixed: the generated body previously ended with {@code return 1;} — a literal
 * that made the assertion pass whether or not {@code i += 1} worked. It now
 * returns {@code i}, so the compound assignment is actually exercised
 * (the expected value is unchanged: 0 + 1 == 1).
 */
@Test
public void testAllClassesAddMethodWithVariableAssignAddInt() throws IOException {
    String src =
        "class {\n" +
        " +public int getInt() {\n" +
        " int i = 0;\n" +
        " i += 1;\n" +
        " return i;\n" +
        " }\n" +
        "}\n";
    testSourceToClasses(
        new String[]{"jasy.TestClass1"},
        src,
        forClass("jasy.TestClass1",
            forInstance(imethod("getInt", invocationResult(is(1))))));
}
/** Verifies {@code +=} on a String local in a generated method. */
@Test
public void testAllClassesAddMethodWithVariableAssignAddString() throws IOException {
    String source =
        "class {\n" +
        " +public String getString() {\n" +
        " String str = \"\";\n" +
        " str += \"Hi\";\n" +
        " return str;\n" +
        " }\n" +
        "}\n";
    testSourceToClasses(
        new String[]{"jasy.TestClass1"},
        source,
        forClass("jasy.TestClass1",
            forInstance(imethod("getString", invocationResult(is("Hi"))))));
}
/**
 * Boxes {@code value} as the primitive wrapper named by {@code type}, applying
 * the same narrowing/widening conversion a Java cast would.
 *
 * @param value the raw value, held in long arithmetic
 * @param type  primitive type name ("byte", "short", "int", "long", "float", "double")
 * @return the converted boxed value, or null for an unrecognized type name
 */
private static Number toNumber(long value, String type) {
    if (type.equals("byte")) {
        return (byte) value;
    } else if (type.equals("short")) {
        return (short) value;
    } else if (type.equals("int")) {
        return (int) value;
    } else if (type.equals("long")) {
        return value;
    } else if (type.equals("float")) {
        return (float) value;
    } else if (type.equals("double")) {
        return (double) value;
    }
    return null;
}
/** Verifies generated prefix increment; the expectation mirrors Java's own {@code ++i} semantics. */
@Test
public void testAllClassesAddMethodWithPrefixInc2() {
    long start = 0;
    long counter = start;
    long captured = ++counter;
    testAllClassesAddMethodWithIncDec(start, counter + captured, "++i");
}
/** Verifies generated postfix increment; the expectation mirrors Java's own {@code i++} semantics. */
@Test
public void testAllClassesAddMethodWithPostfixInc2() {
    long start = 0;
    long counter = start;
    long captured = counter++;
    testAllClassesAddMethodWithIncDec(start, counter + captured, "i++");
}
/** Verifies generated prefix decrement; the expectation mirrors Java's own {@code --i} semantics. */
@Test
public void testAllClassesAddMethodWithPrefixDec2() {
    long start = 0;
    long counter = start;
    long captured = --counter;
    testAllClassesAddMethodWithIncDec(start, counter + captured, "--i");
}
@Test
public void testAllClassesAddMethodWithPostfixDec2() {
long start = 0;
long i = start;
long j = i
long expectedResultBase = i + j;
testAllClassesAddMethodWithIncDec(start, expectedResultBase, "i
}
/** Name of the method generated by {@link #incDecTemplateSrc}. */
private static final String incDecTemplateSrcMethodName = "incDec";
/**
 * jasy template for an increment/decrement test method; the <<type>>, <<start>>
 * and <<incDec>> placeholders are substituted via TemplateSource.expand.
 */
private static final String incDecTemplateSrc =
    "class {\n" +
    " +public <<type>> " + incDecTemplateSrcMethodName + "() {\n" +
    " <<type>> i = <<start>>;\n" +
    " <<type>> j = <<incDec>>;\n" +
    " return i + j;\n" +
    " }\n" +
    "}\n";
/**
 * Expands the inc/dec template for every primitive numeric type and checks that
 * the generated method returns the expected value in that type.
 *
 * @param start              initial counter value
 * @param expectedResultBase expected result computed in long arithmetic
 * @param incDec             source token for the update expression, e.g. "++i"
 */
public void testAllClassesAddMethodWithIncDec(long start, long expectedResultBase, String incDec) {
    SourceCode template =
        TemplateSource.expand(incDecTemplateSrc, map(entry("incDec", incDec))).get(0);
    primitiveNumberTypes.forEach(type -> {
        String startLiteral = toSourceCode(start, type);
        SourceCode sourceCode = TemplateSource.expand(template.src,
            map(entry("type", type), entry("start", startLiteral))).get(0);
        Number expectedValue = toNumber(expectedResultBase, type);
        try {
            testSourceToClasses(new String[]{"jasy.TestClass1"},
                sourceCode.src,
                forClass("jasy.TestClass1",
                    forInstance(imethod(incDecTemplateSrcMethodName,
                        invocationResult(is(expectedValue))))));
        } catch (IOException ex) {
            // IOException is checked and cannot escape the lambda; log and continue.
            Logger.getLogger(SourceToClassTest.class.getName()).log(Level.SEVERE, null, ex);
        }
    });
}
/** Verifies constructor-argument passing and chained calls in a generated method. */
@Test
public void testAllClassesAddMethodWithNewWithArguments() throws IOException {
    String expected = "Some text";
    String source =
        "class {\n" +
        " +public String createStringBuilder() {\n" +
        " return new StringBuilder(\"" + expected + "\").toString();\n" +
        " }\n" +
        "}\n";
    testSourceToClasses(
        new String[]{"jasy.TestClass1"},
        source,
        forClass("jasy.TestClass1",
            forInstance(imethod("createStringBuilder", invocationResult(is(expected))))));
}
/**
 * Generates a getDescription() method whose body is assembled at transform time
 * by iterating over the target class's fields and appending "name = value"
 * pairs; the reference string is computed up front with plain reflection.
 */
@Test
public void testAllClassesAddMethodReturnNamesOfAllFields() throws IOException, InstantiationException, IllegalAccessException {
    Object instance = TestClass2.class.newInstance();
    Field[] fields = TestClass2.class.getDeclaredFields();
    // Reference result: "f1 = v1, f2 = v2, ..." in declaration order.
    String expectedResult = "";
    for(int i = 0; i < fields.length; i++) {
        Field f = fields[i];
        f.setAccessible(true);
        if(i > 0)
            expectedResult += ", ";
        expectedResult += f.getName() + " = " + f.get(instance);
    }
    // jasy source: ${...} runs at transform time; #... quotes code; $... injects values.
    String src =
        "class {\n" +
        " fields=;\n" +
        " \n" +
        " +public String getDescription() ${\n" +
        // NOTE(review): the next line appears truncated (no initializer and no
        // trailing "\n" + continuation) — likely lost in an import/encoding pass;
        // TODO restore the original from version control.
        " CodeAST statements =
        " for(int i = 0; i < fields.size(); i++) {\n" +
        " FieldNode f = fields.get(i);\n" +
        " if(i > 0)\n" +
        " statements += #sb.append(\", \");\n" +
        " statements += #sb.append(($f.name) + \" = \" + (:$f.name));\n" +
        " }\n" +
        // NOTE(review): this line also appears truncated after "return" — the
        // quoted-block opener and "\n" + continuation are missing; TODO restore.
        " return
        " StringBuilder sb = new StringBuilder();\n" +
        " $statements;\n" +
        " return sb.toString();\n" +
        " };\n" +
        " }\n" +
        "}\n";
    testSourceToClasses(
        new String[]{"jasy.TestClass2"},
        src,
        forClass("jasy.TestClass2",
            forInstance(imethod("getDescription", invocationResult(is(expectedResult))))
        )
    );
}
/**
 * Verifies mixing a field's transform-time name ({@code $...name}) with its
 * runtime value ({@code :$...name}) in one generated expression.
 */
@Test
public void testAllClassesAddMethodReturnNameAndValueOfFirstField() throws IOException {
    Field singleField = TestClass1.class.getDeclaredFields()[0];
    int expectedValue = 0;
    String expected = singleField.getName() + " = " + expectedValue;
    String source =
        "class {\n" +
        " fields=;\n" +
        " +public Object getDescription() {\n" +
        " return ($fields.get(0).name) + \" = \" + (:$fields.get(0).name);\n" +
        " }\n" +
        "}\n";
    testSourceToClasses(
        new String[]{"jasy.TestClass1"},
        source,
        forClass("jasy.TestClass1",
            forInstance(imethod("getDescription", invocationResult(is(expected))))));
}
// @Test
// public void testAllClassesAddMethodReturnValueOfMetaVariable() throws IOException {
// int expectedResult = 5;
// String src =
// "class {\n" +
// " +public int getValue() {\n" +
// " ${int i = " + expectedResult + ";}\n" +
// " return $i;\n" +
// testSourceToClasses(
// new String[]{"jasy.TestClass1"},
// src,
// forClass("jasy.TestClass1",
// forInstance(imethod("getValue", invocationResult(is(expectedResult))))
/** Verifies a declared-and-initialized local variable in a generated method. */
@Test
public void testAllClassesAddMethodReturnValueOfVariable() throws IOException {
    int expected = 5;
    String source =
        "class {\n" +
        " +public int getValue() {\n" +
        " int i = " + expected + ";\n" +
        " return i;\n" +
        " }\n" +
        "}\n";
    testSourceToClasses(
        new String[]{"jasy.TestClass1"},
        source,
        forClass("jasy.TestClass1",
            forInstance(imethod("getValue", invocationResult(is(expected))))));
}
/** Verifies a local whose declaration and assignment are separate statements. */
@Test
public void testAllClassesAddMethodReturnValueOfVariableWithSeparateDeclaration() throws IOException {
    int expected = 5;
    String source =
        "class {\n" +
        " +public int getValue() {\n" +
        " int i;\n" +
        " i = " + expected + ";\n" +
        " return i;\n" +
        " }\n" +
        "}\n";
    testSourceToClasses(
        new String[]{"jasy.TestClass1"},
        source,
        forClass("jasy.TestClass1",
            forInstance(imethod("getValue", invocationResult(is(expected))))));
}
/** Verifies injecting a transform-time variable into quoted code via {@code $i}. */
@Test
public void testAllClassesAddMethodReturnValueOfSharedMetaVariable() throws IOException {
    int expected = 5;
    String source =
        "class {\n" +
        " +public int getValue() ${\n" +
        " int i = " + expected + ";\n" +
        " return #return $i;\n" +
        " }\n" +
        "}\n";
    testSourceToClasses(
        new String[]{"jasy.TestClass1"},
        source,
        forClass("jasy.TestClass1",
            forInstance(imethod("getValue", invocationResult(is(expected))))));
}
/** Verifies a {@code $#}-rooted quote inside an otherwise plain generated body. */
@Test
public void testAllClassesAddMethodGenerateFromRootQuote() throws IOException {
    int expected = 5;
    String source =
        "class {\n" +
        " +public int getValue() {\n" +
        " int i = " + expected + ";\n" +
        " $#return i;\n" +
        " }\n" +
        "}\n";
    testSourceToClasses(
        new String[]{"jasy.TestClass1"},
        source,
        forClass("jasy.TestClass1",
            forInstance(imethod("getValue", invocationResult(is(expected))))));
}
/**
 * Verifies that a transform-time (${...}) method body can return a quoted code
 * block which becomes the generated method's body.
 */
@Test
public void testAllClassesAddMethodWhichGenerateQuotedBlock() throws IOException {
    int expectedResult = 5;
    String src =
        "class {\n" +
        " +public int getValue() ${\n" +
        // NOTE(review): this line appears truncated after "return" — the quoted
        // block opener and the "\n" + continuation are missing; TODO restore
        // from version control.
        " return
        " int i = " + expectedResult + ";\n" +
        " return i;\n" +
        " };\n" +
        " }\n" +
        "}\n";
    testSourceToClasses(
        new String[]{"jasy.TestClass1"},
        src,
        forClass("jasy.TestClass1",
            forInstance(imethod("getValue", invocationResult(is(expectedResult))))
        )
    );
}
/**
 * Like testAllClassesAddMethodWhichGenerateQuotedBlock, but the quoted block
 * itself contains a {@code $#} injection.
 */
@Test
public void testAllClassesAddMethodWhichGenerateQuotedBlockWithInjection() throws IOException {
    int expectedResult = 5;
    String src =
        "class {\n" +
        " +public int getValue() ${\n" +
        // NOTE(review): this line appears truncated after "return" — the quoted
        // block opener and the "\n" + continuation are missing; TODO restore.
        " return
        " int i;\n" +
        " $#i = " + expectedResult + ";\n" +
        " return i;\n" +
        " };\n" +
        " }\n" +
        "}\n";
    testSourceToClasses(
        new String[]{"jasy.TestClass1"},
        src,
        forClass("jasy.TestClass1",
            forInstance(imethod("getValue", invocationResult(is(expectedResult))))
        )
    );
}
/**
 * Verifies composing quoted statements with {@code +} at transform time and
 * interpolating the composite ({@code $statements}) into a quoted block.
 */
@Test
public void testAllClassesAddMethodWhichGeneratesAndInterpolateQuotedBlock() throws IOException {
    int i1 = 5;
    int i2 = 7;
    int expectedResult = i1 + i2;
    // Earlier ArrayList-based variant, kept for reference (itself truncated).
    //        String src =
    //            "class {\n" +
    //            "    +public int getValue() ${\n" +
    //            "        ArrayList<CodeAST> statements = new ArrayList<CodeAST>();\n" +
    //            "        statements.add(#i1 = " + i1 + ");\n" +
    //            "        statements.add(#i2 = " + i2 + ");\n" +
    //            "        return
    //            "            int i1;\n" +
    //            "            int i2;\n" +
    //            "            $statements\n" +
    //            "            return i1 + i2;\n" +
    String src =
        "class {\n" +
        " +public int getValue() ${\n" +
        " jasy.lang.ast.CodeAST statements = \n" +
        " (#i1 = " + i1 + ") +\n" +
        " (#i2 = " + i2 + ");\n" +
        // NOTE(review): this line appears truncated after "return" — the quoted
        // block opener and the "\n" + continuation are missing; TODO restore.
        " return
        " int i1;\n" +
        " int i2;\n" +
        " $statements;\n" +
        " return i1 + i2;\n" +
        " };\n" +
        " }\n" +
        "}\n";
    testSourceToClasses(
        new String[]{"jasy.TestClass1"},
        src,
        forClass("jasy.TestClass1",
            forInstance(imethod("getValue", invocationResult(is(expectedResult))))
        )
    );
}
/** Verifies mixing an injected meta variable ({@code $i1}) with a generated local in one expression. */
@Test
public void testAllClassesAddMethodReturnValueOfSumOfSharedMetaVariables() throws IOException {
    int first = 5;
    int second = 7;
    int expected = first + second;
    String source =
        "class {\n" +
        " +public int getValue() ${\n" +
        " int i1 = " + first + ";\n" +
        " int i2 = " + second + ";\n" +
        " return #return $i1 + i2;\n" +
        " }\n" +
        "}\n";
    testSourceToClasses(
        new String[]{"jasy.TestClass1"},
        source,
        forClass("jasy.TestClass1",
            forInstance(imethod("getValue", invocationResult(is(expected))))));
}
/** Verifies addition of two generated locals. */
@Test
public void testAllClassesAddMethodReturnSumOfVariable() throws IOException {
    int first = 5;
    int second = 7;
    int expected = first + second;
    String source =
        "class {\n" +
        " +public int getValue() {\n" +
        " int i1 = " + first + ";\n" +
        " int i2 = " + second + ";\n" +
        " return i1 + i2;\n" +
        " }\n" +
        "}\n";
    testSourceToClasses(
        new String[]{"jasy.TestClass1"},
        source,
        forClass("jasy.TestClass1",
            forInstance(imethod("getValue", invocationResult(is(expected))))));
}
/** Verifies a {@code $#} statement injection into an otherwise plain generated body. */
@Test
public void testAllClassesAddMethodWithInjection() throws IOException {
    int expected = 5;
    String source =
        "class {\n" +
        " +public int getValue() {\n" +
        " int i;\n" +
        " $#i = " + expected + ";\n" +
        " return i;\n" +
        " }\n" +
        "}\n";
    testSourceToClasses(
        new String[]{"jasy.TestClass1"},
        source,
        forClass("jasy.TestClass1",
            forInstance(imethod("getValue", invocationResult(is(expected))))));
}
@Test
public void testAllClassesAddMethodWithWhileCounting() throws IOException {
int counterStart = 0;
int counterEnd = 10;
int valueStart = 0;
int valueIncrement = 6;
int expectedResult = valueStart + (counterEnd - counterStart) * valueIncrement;
String templaceSrc =
"class {\n" +
" +public int getValue() {\n" +
" int i = <<init>>;\n" +
" int value = " + valueStart + ";\n" +
" while(<<cond>>) {\n" +
" value += " + valueIncrement + ";\n" +
" <<inc>>;\n" +
" }\n" +
" return value;\n" +
" }\n" +
"}\n";
expand(templaceSrc,
map(entry("init", "" + counterStart), entry("cond", "i < " + counterEnd), entry("inc", "i++")),
map(entry("init", "" + counterStart), entry("cond", "i < " + counterEnd), entry("inc", "++i")),
map(entry("init", "" + counterEnd), entry("cond", "i > " + counterStart), entry("inc", "i
map(entry("init", "" + counterEnd), entry("cond", "i > " + counterStart), entry("inc", "--i"))
).forEach(combination -> {
try {
testSourceToClasses(
new String[]{"jasy.TestClass1"},
combination.src,
forClass("jasy.TestClass1",
forInstance(imethod("getValue", invocationResult(is(expectedResult))))
)
);
} catch (IOException ex) {
Logger.getLogger(SourceToClassTest.class.getName()).log(Level.SEVERE, null, ex);
}
});
}
@Test
public void testAllClassesAddMethodWithForCounting() throws IOException {
int counterStart = 0;
int counterEnd = 10;
int valueStart = 0;
int valueIncrement = 6;
int expectedResult = valueStart + (counterEnd - counterStart) * valueIncrement;
String templaceSrc =
"class {\n" +
" +public int getValue() {\n" +
" int value = " + valueStart + ";\n" +
" for(int i = <<init>>; <<cond>>; <<inc>>) {\n" +
" value += " + valueIncrement + ";\n" +
" }\n" +
" return value;\n" +
" }\n" +
"}\n";
expand(templaceSrc,
map(entry("init", "" + counterStart), entry("cond", "i < " + counterEnd), entry("inc", "i++")),
map(entry("init", "" + counterStart), entry("cond", "i < " + counterEnd), entry("inc", "++i")),
map(entry("init", "" + counterEnd), entry("cond", "i > " + counterStart), entry("inc", "i
map(entry("init", "" + counterEnd), entry("cond", "i > " + counterStart), entry("inc", "--i"))
).forEach(combination -> {
try {
testSourceToClasses(
new String[]{"jasy.TestClass1"},
combination.src,
forClass("jasy.TestClass1",
forInstance(imethod("getValue", invocationResult(is(expectedResult))))
)
);
} catch (IOException ex) {
Logger.getLogger(SourceToClassTest.class.getName()).log(Level.SEVERE, null, ex);
}
});
}
/** Verifies early returns from both branches of a generated if/else. */
@Test
public void testAllClassesAddMethodWithReturnInIfElse() throws IOException {
    int threshold = 10;
    String source =
        "class {\n" +
        " +public boolean gt(int x) {\n" +
        " if(x > " + threshold + ")\n" +
        " return true;\n" +
        " else\n" +
        " return false;\n" +
        " }\n" +
        "}\n";
    testSourceToClasses(new String[]{"jasy.TestClass1"},
        source,
        forClass("jasy.TestClass1",
            forInstance(imethod("gt", new Class<?>[]{int.class},
                invocationResult(new Object[]{threshold + 1}, is(true))))
            .and(forInstance(imethod("gt", new Class<?>[]{int.class},
                invocationResult(new Object[]{threshold}, is(false)))))));
}
/** Verifies an early return from an if with fall-through to a trailing return. */
@Test
public void testAllClassesAddMethodWithReturnInIf() throws IOException {
    int threshold = 10;
    String source =
        "class {\n" +
        " +public boolean gt(int x) {\n" +
        " if(x > " + threshold + ")\n" +
        " return true;\n" +
        " \n" +
        " return false;\n" +
        " }\n" +
        "}\n";
    testSourceToClasses(new String[]{"jasy.TestClass1"},
        source,
        forClass("jasy.TestClass1",
            forInstance(imethod("gt", new Class<?>[]{int.class},
                invocationResult(new Object[]{threshold + 1}, is(true))))
            .and(forInstance(imethod("gt", new Class<?>[]{int.class},
                invocationResult(new Object[]{threshold}, is(false)))))));
}
/** Verifies parameters and locals coexist correctly in a generated method's frame. */
@Test
public void testAllClassesAddMethodWithParametersAndVariables() throws IOException {
    int x = 10;
    int a = 15;
    int y = 5;
    int b = 56;
    int expected = x + a + y + b;
    String source =
        "class {\n" +
        " +public int gt(int x, int a) {\n" +
        " int y = " + y + ";\n" +
        " int b = " + b + ";\n" +
        " return x + a + y + b;\n" +
        " }\n" +
        "}\n";
    testSourceToClasses(new String[]{"jasy.TestClass1"},
        source,
        forClass("jasy.TestClass1",
            forInstance(imethod("gt", new Class<?>[]{int.class, int.class},
                invocationResult(new Object[]{x, a}, is(expected))))));
}
/**
 * Immutable ordered pair used to enumerate cartesian products of type names.
 *
 * @param <T> element type
 */
private static class Combination<T> {
    // Both components are final; instances may be freely shared.
    public final T first;
    public final T second;

    public Combination(T first, T second) {
        this.first = first;
        this.second = second;
    }
}
/**
 * Returns the full cartesian product of {@code elements} with itself
 * (n^2 pairs, including (x, x) pairs), in row-major order.
 */
private static <T> List<Combination<T>> combine(List<T> elements) {
    ArrayList<Combination<T>> combinations = new ArrayList<>();
    for (T first : elements) {
        for (T second : elements) {
            combinations.add(new Combination<>(first, second));
        }
    }
    return combinations;
}
/** Name of the method generated by {@link #reductionTemplateSrc}. */
private static final String reductionTemplateSrcMethodName = "reduce";
/**
 * jasy template for a binary reduction method; <<type>>, <<lhsType>>,
 * <<rhsType>>, <<lhs>>, <<rhs>> and <<op>> are substituted via
 * TemplateSource.expand.
 */
private static final String reductionTemplateSrc =
    "class {\n" +
    " +public <<type>> " + reductionTemplateSrcMethodName + "() {\n" +
    " <<lhsType>> x = <<lhs>>;\n" +
    " <<rhsType>> y = <<rhs>>;\n" +
    " return x <<op>> y;\n" +
    " }\n" +
    "}\n";
/** Java primitive numeric type names used to drive combinatorial tests. */
private static final List<String> primitiveNumberTypes =
    Arrays.asList("byte", "short", "int", "long", "float", "double");
/**
 * Renders {@code value} as a Java source literal of the given primitive type
 * ("5", "5L", "5.0F", "5.0").
 *
 * @throws IllegalArgumentException for a type name with no literal form here
 */
private static String toSourceCode(long value, String type) {
    switch (type) {
        case "byte":
        case "short":
        case "int":
            return String.valueOf(value);
        case "long":
            return String.valueOf(value) + "L";
        case "float":
            return String.valueOf(value) + ".0F";
        case "double":
            return String.valueOf(value) + ".0";
        default:
            throw new IllegalArgumentException("Cannot convert '" + type + "' into source code.");
    }
}
/** Exercises generated addition across all primitive numeric type combinations. */
@Test
public void testAllClassesAddMethodWithAddExpression() {
    long left = 5;
    long right = 7;
    testAllClassesAddMethodWithReduction(left, right, left + right, "+");
}
/** Exercises generated subtraction across all primitive numeric type combinations. */
@Test
public void testAllClassesAddMethodWithSubExpression() {
    long left = 5;
    long right = 7;
    testAllClassesAddMethodWithReduction(left, right, left - right, "-");
}
/** Exercises generated multiplication across all primitive numeric type combinations. */
@Test
public void testAllClassesAddMethodWithMultExpression() {
    long left = 5;
    long right = 7;
    testAllClassesAddMethodWithReduction(left, right, left * right, "*");
}
/** Exercises generated division; operands chosen to divide evenly so all types agree. */
@Test
public void testAllClassesAddMethodWithDivExpression() {
    long left = 10;
    long right = 5;
    testAllClassesAddMethodWithReduction(left, right, left / right, "/");
}
/** Exercises generated remainder across all primitive numeric type combinations. */
@Test
public void testAllClassesAddMethodWithRemExpression() {
    long left = 11;
    long right = 5;
    testAllClassesAddMethodWithReduction(left, right, left % right, "%");
}
/**
 * Expands the reduction template for every (lhsType, rhsType) pair of primitive
 * numeric types and checks the generated "reduce" method yields the expected
 * value in the promoted result type.
 *
 * @param lhs              left operand value
 * @param rhs              right operand value
 * @param expectedValueRaw expected result computed in long arithmetic
 * @param op               binary operator source token, e.g. "+" or "%"
 */
public void testAllClassesAddMethodWithReduction(long lhs, long rhs, long expectedValueRaw, String op) {
    SourceCode addSourceCode = TemplateSource.expand(reductionTemplateSrc,
        map(entry("op", op))
    ).get(0);
    combine(primitiveNumberTypes).forEach(typeCombination -> {
        String lhsType = typeCombination.first;
        String rhsType = typeCombination.second;
        // Binary numeric promotion: the result type depends on both operand types.
        String resultType = Reduction.typeOf(lhsType, rhsType).getSimpleName();
        String lhsValue = toSourceCode(lhs, lhsType);
        String rhsValue = toSourceCode(rhs, rhsType);
        SourceCode sourceCode = TemplateSource.expand(addSourceCode.src,
            map(entry("type", resultType), entry("lhsType", lhsType), entry("rhsType", rhsType), entry("lhs", "" + lhsValue), entry("rhs", "" + rhsValue))
        ).get(0);
        Number expectedValue = toNumber(expectedValueRaw, resultType);
        try {
            testSourceToClasses(new String[]{"jasy.TestClass1"},
                sourceCode.src,
                forClass("jasy.TestClass1",
                    forInstance(imethod(reductionTemplateSrcMethodName, invocationResult(is(expectedValue))))
                )
            );
        } catch (IOException ex) {
            // IOException is checked and cannot escape the lambda; log and continue.
            Logger.getLogger(SourceToClassTest.class.getName()).log(Level.SEVERE, null, ex);
        }
    });
}
/**
 * Builds a bytecode-transforming function from jasy {@code source}: the returned
 * function compiles the source, resolves it, applies the resulting class
 * transformer exhaustively to the input class bytes, and verifies the output
 * with ASM's CheckClassAdapter.
 *
 * @param resolver class resolver used for compilation and resolution
 * @param source   jasy transformation source
 * @return a function mapping original class bytes to transformed bytes;
 *         on IOException it logs and returns the ORIGINAL bytes unchanged
 *         (callers silently proceed untransformed — intentional best-effort)
 */
private static Function<byte[], byte[]> transformClass(ClassResolver resolver, String source) {
    ASMCompiler compiler = new ASMCompiler(resolver);
    return bytes -> {
        try {
            ModuleAST module = compiler.compile(new ByteArrayInputStream(source.getBytes("UTF-8")));
            ArrayList<Message> errorMessages = new ArrayList<>();
            module.resolve(null, null, resolver, errorMessages);
            if(errorMessages.size() > 0) {
                // Resolution errors abort the test with all messages joined.
                String msg = errorMessages.stream().map(m -> m.toString()).collect(Collectors.joining("\n"));
                throw new RuntimeException(msg);
            } else {
                Function<Transformation<ClassNode>, Runnable> classTransformer = module.toClassTransformer();
                ExhaustiveClassTransformer eTransformer = new ExhaustiveClassTransformer(classTransformer);
                byte[] newBytes = eTransformer.transform(bytes);
                InputStream classStream = new ByteArrayInputStream(newBytes);
                ClassReader classReader = new ClassReader(classStream);
                // classReader.accept(new TraceClassVisitor(new PrintWriter(System.out)), 0);
                // Sanity-check the emitted bytecode; failures are printed, not thrown.
                CheckClassAdapter.verify(classReader, false, new PrintWriter(System.out));
                // ClassWriter classWriter = new ClassWriter(ClassWriter.COMPUTE_MAXS);
                // classReader.accept(classWriter, 0);
                // Textifier asmifier = new Textifier();
                // classWriter.
                return newBytes;
            }
        } catch (IOException ex) {
            Logger.getLogger(SourceToClassTest.class.getName()).log(Level.SEVERE, null, ex);
        }
        return bytes;
    };
}
/**
 * Compiles {@code source} with the jasy compiler, applies the resulting
 * transformation to each named class while loading it through a proxy class
 * loader, and asserts {@code assertion} over the transformed classes.
 *
 * Fixed: removed a no-op {@code try/catch(Error)} that only rethrew, corrected
 * the logger to this class (it logged under SourceToAstTest), and dropped stale
 * commented-out planning notes.
 *
 * @param classNames fully qualified names of the classes to load and transform
 * @param source     jasy source describing the transformation
 * @param assertion  predicate evaluated against the loaded, transformed classes
 * @throws IOException if reading class files fails
 */
private static void testSourceToClasses(String[] classNames, String source, Predicate<Class<?>[]> assertion) throws IOException {
    CommonClassMap classMap = new CommonClassMap();
    for(String className: classNames)
        classMap.addClassName(className);
    // Pre-register every class the generated/transforming code may reference.
    classMap.addClassName("java.lang.String");
    classMap.addClassName("java.lang.Object");
    classMap.addClassName("java.lang.StringBuilder");
    classMap.addClassName("jasy.lang.ast.CodeAST");
    classMap.addClassName("org.objectweb.asm.tree.FieldNode");
    CommonClassResolver resolver = new CommonClassResolver(classMap);
    resolver.importPackage("java.lang");
    resolver.importPackage("jasy.lang.ast");
    resolver.importPackage("org.objectweb.asm.tree");
    // Only the classes under test are intercepted and rewritten while loading.
    ClassLoader cl = new ProxyClassLoader(ifIn(classNames), classBytesFromName().andThen(transformClass(resolver, source)));
    Class<?>[] classes = Arrays.asList(classNames).stream()
        .map(className -> {
            try {
                return cl.loadClass(className);
            } catch (ClassNotFoundException ex) {
                Logger.getLogger(SourceToClassTest.class.getName()).log(Level.SEVERE, null, ex);
                return null;
            }
        })
        .toArray(size -> new Class<?>[size]);
    assertTrue(assertion.test(classes));
}
/**
 * Adapts a single-class predicate to a class-array predicate, applying it to
 * the first class whose name equals {@code name}.
 * Throws NoSuchElementException (via Optional.get) if no class matches.
 */
private static Predicate<Class<?>[]> forClass(String name, Predicate<Class<?>> predicate) {
    return classes -> predicate.test(
        Arrays.asList(classes).stream()
            .filter(candidate -> candidate.getName().equals(name))
            .findFirst()
            .get());
}
/** Class predicate: true if any declared field of the class satisfies {@code predicate}. */
private static Predicate<Class<?>> chasFieldWhere(Predicate<Field> predicate) {
    return c -> {
        for (Field declaredField : c.getDeclaredFields()) {
            if (predicate.test(declaredField)) {
                return true;
            }
        }
        return false;
    };
}
/** Field predicate over the field's simple name. */
private static Predicate<Field> fname(Predicate<String> predicate) {
    return field -> predicate.test(field.getName());
}
/** Field predicate over the field's declared type. */
private static Predicate<Field> ftype(Predicate<Class<?>> predicate) {
    return field -> predicate.test(field.getType());
}
/** Field predicate over the field's modifier bit mask. */
private static Predicate<Field> fmodifiers(Predicate<Integer> predicate) {
    return field -> predicate.test(field.getModifiers());
}
/** Class predicate: true if any declared method of the class satisfies {@code predicate}. */
private static Predicate<Class<?>> chasMethodWhere(Predicate<Method> predicate) {
    return c -> {
        for (Method declaredMethod : c.getDeclaredMethods()) {
            if (predicate.test(declaredMethod)) {
                return true;
            }
        }
        return false;
    };
}
/**
 * Adapts an instance predicate to a class predicate by instantiating the class
 * via its no-arg constructor; returns false (after logging) when instantiation
 * fails.
 * Modernized: {@code getDeclaredConstructor().newInstance()} replaces the
 * deprecated {@code Class.newInstance()}; unlike the old call it wraps checked
 * constructor exceptions in InvocationTargetException instead of silently
 * propagating them, so such failures now log-and-return-false too.
 */
private static Predicate<Class<?>> forInstance(Predicate<Object> predicate) {
    return c -> {
        try {
            Object instance = c.getDeclaredConstructor().newInstance();
            return predicate.test(instance);
        } catch (ReflectiveOperationException ex) {
            Logger.getLogger(SourceToClassTest.class.getName()).log(Level.SEVERE, null, ex);
        }
        return false;
    };
}
/** Overload for a no-argument method; delegates with an empty parameter list. */
private static Predicate<Object> imethod(String name, BiPredicate<Object, Method> predicate) {
    Class<?>[] noParameters = {};
    return imethod(name, noParameters, predicate);
}
/**
 * Instance predicate: looks up the named declared method with the given
 * parameter types and applies {@code predicate} to (instance, method).
 * Returns false (after logging) if the method is missing or inaccessible.
 */
private static Predicate<Object> imethod(String name, Class<?>[] parameterTypes, BiPredicate<Object, Method> predicate) {
    return instance -> {
        try {
            Method method = instance.getClass().getDeclaredMethod(name, parameterTypes);
            return predicate.test(instance, method);
        } catch (NoSuchMethodException | SecurityException ex) {
            Logger.getLogger(SourceToClassTest.class.getName()).log(Level.SEVERE, null, ex);
            return false;
        }
    };
}
/**
 * Instance predicate: looks up the named declared field, makes it accessible,
 * and applies {@code predicate} to (instance, field).
 * Returns false (after logging) if the field is missing or access is denied.
 */
private static Predicate<Object> ifield(String name, BiPredicate<Object, Field> predicate) {
    return instance -> {
        try {
            Field field = instance.getClass().getDeclaredField(name);
            field.setAccessible(true);
            return predicate.test(instance, field);
        } catch (NoSuchFieldException | SecurityException ex) {
            Logger.getLogger(SourceToClassTest.class.getName()).log(Level.SEVERE, null, ex);
            return false;
        }
    };
}
/** Overload for invoking with no arguments; delegates with an empty argument array. */
private static BiPredicate<Object, Method> invocationResult(Predicate<Object> predicate) {
    Object[] noArguments = {};
    return invocationResult(noArguments, predicate);
}
/**
 * Invokes the method on the instance with the given arguments and tests the
 * result, echoing the result to stdout. Invocation failures are logged and
 * treated as non-matching.
 */
private static BiPredicate<Object, Method> invocationResult(Object[] args, Predicate<Object> predicate) {
    return (instance, method) -> {
        try {
            Object result = method.invoke(instance, args);
            System.out.println("Invocation result:\n" + result);
            return predicate.test(result);
        } catch (IllegalAccessException | IllegalArgumentException | InvocationTargetException ex) {
            Logger.getLogger(SourceToClassTest.class.getName()).log(Level.SEVERE, null, ex);
        }
        return false;
    };
}
// Reads the field's value from the instance and tests it; read failures are
// logged and treated as non-matching.
private static BiPredicate<Object, Field> ifget(Predicate<Object> predicate) {
    return (instance, field) -> {
        try {
            Object value = field.get(instance);
            return predicate.test(value);
        } catch (IllegalArgumentException | IllegalAccessException ex) {
            Logger.getLogger(SourceToClassTest.class.getName()).log(Level.SEVERE, null, ex);
        }
        return false;
    };
}
// Modifier-bits predicate: public.
private static Predicate<Integer> isPublic() {
    return Modifier::isPublic;
}
// Modifier-bits predicate: protected.
private static Predicate<Integer> isProtected() {
    return Modifier::isProtected;
}
// Modifier-bits predicate: private.
private static Predicate<Integer> isPrivate() {
    return Modifier::isPrivate;
}
// Modifier-bits predicate: static.
private static Predicate<Integer> isStatic() {
    return Modifier::isStatic;
}
// Matches names contained in the given array. Arrays.asList is a live view
// of the array, so the bound method reference preserves per-call semantics.
private static Predicate<String> ifIn(String[] names) {
    return Arrays.asList(names)::contains;
}
// Per-thread cache of class-file bytes keyed by fully-qualified class name.
// ThreadLocal.withInitial replaces the verbose anonymous-subclass idiom.
private static ThreadLocal<Hashtable<String, byte[]>> classBytesCacheMap =
        ThreadLocal.withInitial(Hashtable::new);
/**
 * Maps a fully-qualified class name to its (re-serialized via ASM) class-file
 * bytes, caching the result in the per-thread cache. Returns null on I/O
 * failure.
 */
private static Function<String, byte[]> classBytesFromName() {
    return name -> {
        try {
            byte[] cachedBytes = classBytesCacheMap.get().get(name);
            if (cachedBytes == null) {
                String classFile = "build/test/classes/" + name.replace(".", "/") + ".class";
                // try-with-resources: the original never closed this stream
                // (and computed an unused canonical path alongside it).
                try (InputStream classStream = new FileInputStream(classFile)) {
                    ClassReader classReader = new ClassReader(classStream);
                    ClassWriter classWriter = new ClassWriter(ClassWriter.COMPUTE_MAXS);
                    classReader.accept(classWriter, 0);
                    cachedBytes = classWriter.toByteArray();
                }
                classBytesCacheMap.get().put(name, cachedBytes);
            }
            return cachedBytes;
        } catch (IOException ex) {
            // Was Logger.getLogger(SourceToAstTest...) — aligned with the
            // logger class used by every other method in this class.
            Logger.getLogger(SourceToClassTest.class.getName()).log(Level.SEVERE, null, ex);
            return null;
        }
    };
}
// Lifts a name predicate to a Method predicate.
private static Predicate<Method> mname(Predicate<String> predicate) {
    return method -> predicate.test(method.getName());
}
// Lifts a class predicate to a Method predicate over the return type.
private static Predicate<? super Method> rreturnType(Predicate<Class<?>> predicate) {
    return method -> predicate.test(method.getReturnType());
}
// Lifts a modifier-bits predicate to a Method predicate.
private static Predicate<? super Method> rmodifiers(Predicate<Integer> predicate) {
    return method -> predicate.test(method.getModifiers());
}
} |
package dk.cmol.arduinorgb_controller;
import java.io.IOException;
import java.io.OutputStream;
import java.net.Socket;
import android.util.Log;
public class ArduinoSocket {
private ArduinoRGBActivity parent = null;
private Socket sock = null;
private OutputStream stream = null;
private String ip = null;
private int port;
public ArduinoSocket(ArduinoRGBActivity parent) {
this.parent = parent;
}
public void write(final byte[] buffer) {
Thread thread = new Thread(new Runnable(){
@Override
public void run() {
connect();
try {
stream.write(buffer);
} catch (IOException e) {
// TODO Auto-generated catch block
Log.e("ArduinoSocket", e.getMessage());
e.printStackTrace();
}
close();
}
});
thread.start();
}
private void connect() {
getPreferences();
try {
sock = new Socket(ip, port);
stream = sock.getOutputStream();
} catch (IOException e) {
// TODO: Write error to parent view
e.printStackTrace();
Log.e("ArduinoSocket", e.getMessage());
}
}
private void getPreferences() {
// TODO: Read prefs from key-stor values
Log.i("ArduinoSocket", "Setting IP and PORT");
ip = "172.16.0.5";
port = 2000;
}
public void close() {
try {
sock.close();
} catch (IOException e) {
// TODO Write error to parent view
e.printStackTrace();
Log.e("ArduinoSocket", e.getMessage());
}
}
} |
package dr.evomodel.substmodel;
import dr.evolution.datatype.DataType;
import dr.inference.glm.GeneralizedLinearModel;
import dr.inference.loggers.LogColumn;
import dr.inference.model.BayesianStochasticSearchVariableSelection;
import dr.inference.model.Model;
import dr.util.Citation;
import dr.util.CommonCitations;
import java.util.Collections;
import java.util.List;
/**
* @author Marc A. Suchard
*/
public class GLMSubstitutionModel extends ComplexSubstitutionModel {
public GLMSubstitutionModel(String name, DataType dataType, FrequencyModel rootFreqModel,
GeneralizedLinearModel glm) {
super(name, dataType, rootFreqModel, null);
this.glm = glm;
addModel(glm);
testProbabilities = new double[stateCount*stateCount];
}
public double[] getRates() {
return glm.getXBeta();
}
protected void handleModelChangedEvent(Model model, Object object, int index) {
if (model == glm) {
updateMatrix = true;
fireModelChanged();
} else
super.handleModelChangedEvent(model, object, index);
}
// This info can be gotten from the GLM
// public LogColumn[] getColumns() {
// return glm.getColumns();
public LogColumn[] getColumns() {
//Aggregate columns from ComplexSubstitutionModel with glm.columns
LogColumn[] aggregated = new LogColumn[glm.getColumns().length + 2];
int index = 0;
for (LogColumn col : glm.getColumns()) {
aggregated[index] = col;
index++;
}
aggregated[index++] = new LikelihoodColumn(getId() + ".L");
aggregated[index++] = new NormalizationColumn(getId() + ".Norm");
return aggregated;
//return glm.getColumns();
}
public double getLogLikelihood() {
double logL = super.getLogLikelihood();
if (logL == 0 &&
BayesianStochasticSearchVariableSelection.Utils.connectedAndWellConditioned(testProbabilities, this)) {
// Also check that graph is connected
return 0;
}
return Double.NEGATIVE_INFINITY;
}
@Override
public String getDescription() {
return "Generalized linear (model, GLM) substitution model"; // TODO Horrible; fix
}
@Override
public List<Citation> getCitations() {
return Collections.singletonList(CommonCitations.LEMEY_2014_UNIFYING);
}
private GeneralizedLinearModel glm;
private double[] testProbabilities;
} |
package edu.isi.pegasus.planner.classes;
import edu.isi.pegasus.planner.namespace.Namespace;
import edu.isi.pegasus.planner.namespace.Pegasus;
import edu.isi.pegasus.planner.namespace.Selector;
import edu.isi.pegasus.planner.partitioner.graph.GraphNode;
import edu.isi.pegasus.planner.partitioner.graph.MapGraph;
import java.util.LinkedList;
import java.util.List;
/**
* A stub data flow job for DECAF integration
*
* @author Karan Vahi
*/
public class DataFlowJob extends AggregatedJob{
private List<Link> mEdges;
/**
* The default constructor.
*/
public DataFlowJob() {
super();
mEdges = new LinkedList();
//data flow job cannot be executed via kickstart
this.vdsNS.construct( Pegasus.GRIDSTART_KEY, "none" );
}
/**
* The overloaded constructor.
*
* @param job the job whose shallow copy is created, and is the main job.
*/
public DataFlowJob( Job job ) {
this( job , -1 );
}
/**
* The overloaded constructor.
*
* @param job the job whose shallow copy is created, and is the main job.
* @param num the number of constituent jobs.
*/
public DataFlowJob(Job job,int num) {
super(job, num );
mEdges = new LinkedList();
//data flow job cannot be executed via kickstart
this.vdsNS.construct( Pegasus.GRIDSTART_KEY, "none" );
}
/**
* Add Link
*
* @param e
*/
public void addEdge( Link e ){
mEdges.add( e );
}
/**
* A link job to indicate a job in the data flow that does data transformation
* between two jobs
*/
public static class Link extends Job{
private GraphNode mParentJob;
private GraphNode mChildJob;
public Link(){
super();
}
public void setLink( String parent, String child ){
mParentJob = new GraphNode( parent );
mChildJob = new GraphNode( child );
}
public String getParentID(){
return mParentJob.getID();
}
public String getChildID(){
return mChildJob.getID();
}
@Override
public String toString() {
throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
}
}
} |
package com.brein.time.timeseries.gson;
import com.brein.time.timeseries.BucketTimeSeries;
import com.brein.time.timeseries.BucketTimeSeriesConfig;
import com.google.gson.JsonDeserializationContext;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonParseException;
import com.google.gson.JsonSerializationContext;
import org.apache.log4j.Logger;
import java.io.Serializable;
import java.lang.reflect.Array;
import java.util.concurrent.TimeUnit;
import java.util.function.BiFunction;
public class TypeConverterHelper {
private static final Logger LOG = Logger.getLogger(TypeConverterHelper.class);
public static JsonObject serialize(final BucketTimeSeries<?> o, final JsonSerializationContext context) {
final JsonObject jsonObject = new JsonObject();
// the configuration
jsonObject.add("timeUnit", context.serialize(o.getConfig().getTimeUnit()));
jsonObject.add("bucketSize", context.serialize(o.getConfig().getBucketSize()));
jsonObject.add("bucketContent", TypeConverterHelper.serializeClass(o.getConfig().getBucketContent(), context));
jsonObject.add("fillNumberWithZero", context.serialize(o.getConfig().isFillNumberWithZero()));
//TODO: Check if contents are containers and store their template classes as well
// the values
jsonObject.add("timeSeries", context.serialize(o.order()));
jsonObject.add("now", context.serialize(o.getNow()));
return jsonObject;
}
public static Object[] deserialize(final JsonElement jsonElement, final JsonDeserializationContext context) {
return deserialize(jsonElement, context, (bucketContent, el) -> context.deserialize(el, TypeConverterHelper.arrayClass(bucketContent)));
}
public static Object[] deserialize(final JsonElement jsonElement, final JsonDeserializationContext context, final BiFunction<Class<?>, JsonElement, Serializable[]> timeSeriesDeserializer) {
final JsonObject jsonObject = jsonElement.getAsJsonObject();
if (LOG.isTraceEnabled()) {
LOG.trace(String.format("Trying to deserialize the element '%s'.", jsonElement.toString()));
}
// get the important classes
final Class<?> bucketContent = resolveClass("bucketContent", jsonObject, context);
// it may happen that we have an invalid version or type
if (bucketContent == null) {
return null;
}
// configuration
final TimeUnit timeUnit = context.deserialize(jsonObject.get("timeUnit"), TimeUnit.class);
final int bucketSize = context.deserialize(jsonObject.get("bucketSize"), int.class);
final boolean fillNumberWithZero = context.deserialize(jsonObject.get("fillNumberWithZero"), boolean.class);
// the values
final Long now = context.deserialize(jsonObject.get("now"), Long.class);
final Serializable[] timeSeries = timeSeriesDeserializer.apply(bucketContent, jsonObject.get("timeSeries"));
@SuppressWarnings("unchecked")
final BucketTimeSeriesConfig config = new BucketTimeSeriesConfig(bucketContent, timeUnit, timeSeries.length, bucketSize, fillNumberWithZero);
return new Object[]{config, timeSeries, now};
}
@SuppressWarnings("unchecked")
public static <T> Class<T> resolveClass(final String key, final JsonObject jsonObject, final JsonDeserializationContext context) {
final String clazz = context.deserialize(jsonObject.get(key), String.class);
if (clazz == null) {
return null;
} else {
try {
return (Class<T>) Class.forName(clazz);
} catch (final ClassNotFoundException e) {
throw new JsonParseException("Cannot resolve class.", e);
}
}
}
public static JsonElement serializeClass(final Class<?> clazz, final JsonSerializationContext context) {
return context.serialize((clazz == null ? Object.class : clazz).getCanonicalName());
}
public static Class<?> arrayClass(final Class<?> clazz) {
return Array.newInstance(clazz, 0).getClass();
}
} |
package edu.mit.streamjit.impl.common;
import com.google.common.base.Function;
import static com.google.common.base.Preconditions.*;
import com.google.common.base.Strings;
import com.google.common.collect.BoundType;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.collect.Maps;
import com.google.common.collect.Range;
import com.google.common.primitives.Ints;
import edu.mit.streamjit.api.Identity;
import edu.mit.streamjit.api.Pipeline;
import edu.mit.streamjit.api.Worker;
import edu.mit.streamjit.impl.blob.BlobFactory;
import edu.mit.streamjit.impl.interp.Interpreter;
import edu.mit.streamjit.util.ReflectionUtils;
import edu.mit.streamjit.util.json.Jsonifier;
import edu.mit.streamjit.util.json.JsonifierFactory;
import edu.mit.streamjit.util.json.Jsonifiers;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.NavigableSet;
import java.util.Objects;
import java.util.Set;
import java.util.TreeSet;
import javax.json.Json;
import javax.json.JsonArrayBuilder;
import javax.json.JsonObject;
import javax.json.JsonObjectBuilder;
import javax.json.JsonValue;
/**
* A Configuration contains parameters that can be manipulated by the autotuner
* (or other things).
*
* Instances of this class are immutable. This class uses the builder pattern;
* to create a Configuration, get a builder by calling Configuration.builder(),
* add parameters or subconfigurations to it, then call the builder's build()
* method to build the configuration.
*
* Unless otherwise specified, passing null or an empty string to this class'
* or any parameter class' methods will result in a NullPointerException being
* thrown.
* @author Jeffrey Bosboom <jeffreybosboom@gmail.com>
* @since 3/23/2013
*/
public final class Configuration {
private final ImmutableMap<String, Parameter> parameters;
private final ImmutableMap<String, Configuration> subconfigurations;
// Reached only from Builder.build(), which guarantees non-null maps, so
// plain asserts suffice instead of IllegalArgumentException checks.
private Configuration(ImmutableMap<String, Parameter> params, ImmutableMap<String, Configuration> subconfigs) {
    assert params != null;
    assert subconfigs != null;
    this.parameters = params;
    this.subconfigurations = subconfigs;
}
/**
 * Builds Configuration instances. Parameters and subconfigurations can be
 * added or removed from this builder; calling build() creates a
 * Configuration from the current builder state. Note that build() may be
 * called more than once; combined with clone(), this allows creating
 * "prototype" builders that can be cloned, customized, and built.
 */
public static final class Builder implements Cloneable {
    private final Map<String, Parameter> parameters;
    private final Map<String, Configuration> subconfigurations;
    /**
     * Constructs a new, empty Builder. Called only by
     * Configuration.builder().
     */
    private Builder() {
        //These maps have their contents copied in the other constructor, so
        //just use these singleton empty maps.
        this(ImmutableMap.<String, Parameter>of(), ImmutableMap.<String, Configuration>of());
    }
    /**
     * Constructs a new Builder with the given parameters and
     * subconfigurations. Called only by Builder.clone().
     * @param parameters the parameters
     * @param subconfigurations the subconfigurations
     */
    private Builder(Map<String, Parameter> parameters, Map<String, Configuration> subconfigurations) {
        //Only called by our own code, so assert.
        assert parameters != null;
        assert subconfigurations != null;
        this.parameters = new HashMap<>(parameters);
        this.subconfigurations = new HashMap<>(subconfigurations);
    }
    /**
     * Adds the given parameter to this builder.
     * @param parameter the parameter to add
     * @return this builder, for chaining
     * @throws IllegalArgumentException if a parameter with the same name
     * has already been added
     */
    public Builder addParameter(Parameter parameter) {
        checkNotNull(parameter);
        //The parameter constructor should enforce this, so assert.
        assert !Strings.isNullOrEmpty(parameter.getName()) : parameter;
        //Report the existing and the new parameter; the original printed
        //the entire parameter map instead of the conflicting newcomer.
        checkArgument(!parameters.containsKey(parameter.getName()), "conflicting names %s %s", parameters.get(parameter.getName()), parameter);
        parameters.put(parameter.getName(), parameter);
        return this;
    }
    /**
     * Removes and returns the parameter with the given name from this
     * builder, or returns null if this builder doesn't contain a parameter
     * with that name.
     * @param name the name of the parameter to remove
     * @return the removed parameter, or null
     */
    public Parameter removeParameter(String name) {
        return parameters.remove(checkNotNull(Strings.emptyToNull(name)));
    }
    /**
     * Adds the given subconfiguration under the given name.
     * @param name the subconfiguration's name
     * @param subconfiguration the subconfiguration to add
     * @return this builder, for chaining
     * @throws IllegalArgumentException if the name is already in use
     */
    public Builder addSubconfiguration(String name, Configuration subconfiguration) {
        checkNotNull(Strings.emptyToNull(name));
        checkNotNull(subconfiguration);
        checkArgument(!subconfigurations.containsKey(name), "name %s already in use", name);
        subconfigurations.put(name, subconfiguration);
        return this;
    }
    /**
     * Removes and returns the subconfiguration with the given name from
     * this builder, or returns null if this builder doesn't contain a
     * subconfiguration with that name.
     * @param name the name of the subconfiguration to remove
     * @return the removed subconfiguration, or null
     */
    public Configuration removeSubconfiguration(String name) {
        return subconfigurations.remove(checkNotNull(Strings.emptyToNull(name)));
    }
    /**
     * Builds a new Configuration from the parameters and subconfigurations
     * added to this builder. This builder is still valid and may be used
     * to build more configurations (perhaps after adding or removing
     * elements), but the returned configurations remain immutable.
     * @return a new Configuration containing the parameters and
     * subconfigurations added to this builder
     */
    public Configuration build() {
        return new Configuration(ImmutableMap.copyOf(parameters), ImmutableMap.copyOf(subconfigurations));
    }
    /**
     * Returns a copy of this builder. Subsequent changes to this builder
     * have no effect on the copy, and vice versa. This method is useful
     * for creating "prototype" builders that can be cloned, customized,
     * and built.
     * @return a copy of this builder
     */
    @Override
    public Builder clone() {
        //We're final, so we don't need to use super.clone().
        return new Builder(parameters, subconfigurations);
    }
}
/**
 * Creates a new, empty builder. Add parameters and subconfigurations to the
 * returned builder, then call its build() method to obtain a Configuration.
 * @return a new, empty builder
 */
public static Builder builder() {
    return new Builder();
}
/**
 * Parses a Configuration from its JSON string form.
 * @param json the JSON representation of a configuration
 * @return the parsed Configuration
 */
public static Configuration fromJson(String json) {
    final Configuration parsed = Jsonifiers.fromJson(json, Configuration.class);
    return parsed;
}
/**
 * Serializes this Configuration to its JSON string form.
 * @return a JSON string describing this configuration
 */
public String toJson() {
    final JsonValue json = Jsonifiers.toJson(this);
    return json.toString();
}
/**
 * JSON-ifies Configurations. Note that Configuration handles its maps
 * specially to simplify parsing on the Python side.
 *
 * This class is protected with a public constructor to allow ServiceLoader
 * to instantiate it.
 */
protected static final class ConfigurationJsonifier implements Jsonifier<Configuration>, JsonifierFactory {
    public ConfigurationJsonifier() {}
    @Override
    public Configuration fromJson(JsonValue value) {
        JsonObject configObj = Jsonifiers.checkClassEqual(value, Configuration.class);
        JsonObject paramsObj = checkNotNull(configObj.getJsonObject("params"));
        JsonObject subconfigsObj = checkNotNull(configObj.getJsonObject("subconfigs"));
        Builder builder = builder();
        //Rebuild each parameter and subconfiguration through the builder.
        for (Map.Entry<String, JsonValue> entry : paramsObj.entrySet()) {
            builder.addParameter(Jsonifiers.fromJson(entry.getValue(), Parameter.class));
        }
        for (Map.Entry<String, JsonValue> entry : subconfigsObj.entrySet()) {
            builder.addSubconfiguration(entry.getKey(), Jsonifiers.fromJson(entry.getValue(), Configuration.class));
        }
        return builder.build();
    }
    @Override
    public JsonValue toJson(Configuration t) {
        JsonObjectBuilder params = Json.createObjectBuilder();
        for (Map.Entry<String, Parameter> entry : t.parameters.entrySet()) {
            params.add(entry.getKey(), Jsonifiers.toJson(entry.getValue()));
        }
        JsonObjectBuilder subconfigs = Json.createObjectBuilder();
        for (Map.Entry<String, Configuration> entry : t.subconfigurations.entrySet()) {
            subconfigs.add(entry.getKey(), Jsonifiers.toJson(entry.getValue()));
        }
        return Json.createObjectBuilder()
                .add("class", Jsonifiers.toJson(Configuration.class))
                .add("params", params)
                .add("subconfigs", subconfigs)
                //Python-side support
                .add("__module__", "configuration")
                .add("__class__", Configuration.class.getSimpleName())
                .build();
    }
    @Override
    @SuppressWarnings("unchecked")
    public <T> Jsonifier<T> getJsonifier(Class<T> klass) {
        return klass.equals(Configuration.class) ? (Jsonifier<T>) this : null;
    }
}
/**
 * Returns an immutable mapping of parameter names to the parameters in this
 * configuration. The returned map is this configuration's own (shared, not
 * copied) and cannot be modified by the caller.
 * @return an immutable mapping of the parameters in this configuration
 */
public ImmutableMap<String, Parameter> getParametersMap() {
    return parameters;
}
/**
 * Gets the parameter with the given name, or null if this configuration
 * doesn't contain a parameter with that name.
 * @param name the name of the parameter
 * @return the parameter, or null
 */
public Parameter getParameter(String name) {
    final String key = checkNotNull(Strings.emptyToNull(name));
    return parameters.get(key);
}
/**
 * Gets the parameter with the given name cast to the given parameter type,
 * or null if this configuration doesn't contain a parameter with that name.
 * If this configuration does have a parameter with that name but of a
 * different type, a ClassCastException will be thrown.
 * @param <T> the type of the parameter to get
 * @param name the name of the parameter
 * @param parameterType the type of the parameter
 * @return the parameter, or null
 * @throws ClassCastException if the parameter with the given name exists
 * but is of a different type
 */
public <T extends Parameter> T getParameter(String name, Class<T> parameterType) {
    checkNotNull(parameterType);
    final Parameter found = getParameter(name);
    return parameterType.cast(found);
}
/**
 * Gets the generic parameter with the given name cast to the given
 * parameter type (including checking the type parameter type), or null if
 * this configuration doesn't contain a parameter with that name. If this
 * configuration does have a parameter with that name but of a different
 * type or with a different type parameter type, a ClassCastException will
 * be thrown.
 * @param <U> the type parameter's type
 * @param <T> the parameter type to check against
 * @param <V> the returned parameter type
 * @param name the name of the parameter
 * @param parameterType the type of the parameter
 * @param typeParameterType the expected type parameter type (exact match)
 * @return the parameter, or null
 * @throws ClassCastException if the parameter with the given name exists
 * but has a different type or type parameter type
 */
public <U, T extends GenericParameter<?>, V extends GenericParameter<U>> V getParameter(String name, Class<T> parameterType, Class<U> typeParameterType) {
    T parameter = getParameter(name, parameterType);
    //An absent parameter yields null, matching the other getParameter
    //overloads (previously this dereferenced null and threw NPE).
    if (parameter == null)
        return null;
    //This must be an exact match.
    if (parameter.getGenericParameter() != typeParameterType)
        throw new ClassCastException("Type parameter type mismatch: "+parameter.getGenericParameter() +" != "+typeParameterType);
    //Due to the checks above, this is safe.
    @SuppressWarnings("unchecked")
    V retval = (V)parameter;
    return retval;
}
/**
 * Returns an immutable mapping of subconfiguration names to the
 * subconfigurations of this configuration. The returned map is this
 * configuration's own (shared, not copied) and cannot be modified by the
 * caller.
 * @return an immutable mapping of the subconfigurations of this
 * configuration
 */
public ImmutableMap<String, Configuration> getSubconfigurationsMap() {
    return subconfigurations;
}
/**
 * Gets the subconfiguration with the given name, or null if this
 * configuration doesn't contain a subconfiguration with that name.
 * @param name the name of the subconfiguration
 * @return the subconfiguration, or null
 */
public Configuration getSubconfiguration(String name) {
    final String key = checkNotNull(Strings.emptyToNull(name));
    return subconfigurations.get(key);
}
/**
 * A Parameter is a configuration object with a name. All implementations
 * of this interface are immutable.
 *
 * Users of Configuration shouldn't implement this interface themselves;
 * instead, use one of the provided implementations in Configuration.
 */
public interface Parameter extends Serializable {
    /**
     * Returns this parameter's name. The implementations provided in
     * Configuration reject null or empty names at construction time.
     * @return this parameter's name
     */
    public String getName();
}
/**
 * A GenericParameter is a Parameter with a type parameter. (The name
 * GenericParameter was chosen in preference to ParameterizedParameter.)
 *
 * This interface isn't particularly interesting in and of itself; it mostly
 * exists to make the Configuration.getParameter(String, Class<T>, Class<U>)
 * overload have the proper (and checked) return type.
 * @param <T> the type parameter's type
 */
public interface GenericParameter<T> extends Parameter {
    /**
     * Returns the Class object describing this parameter's type parameter.
     * @return the type parameter's class
     */
    public Class<?> getGenericParameter();
}
/**
 * An IntParameter has an integer value that lies within some closed range.
 * The lower and upper bounds are <b>inclusive</b>.
 */
public static final class IntParameter implements Parameter {
    private static final long serialVersionUID = 1L;
    private final String name;
    /**
     * The Range of this IntParameter. Note that this range is closed on
     * both ends.
     */
    private final Range<Integer> range;
    /**
     * The value of this IntParameter, which must be contained in the range.
     */
    private final int value;
    /**
     * Constructs a new IntParameter.
     * @param name the parameter's name
     * @param min the minimum of the range (inclusive)
     * @param max the maximum of the range (inclusive)
     * @param value the parameter's value
     */
    public IntParameter(String name, int min, int max, int value) {
        this(name, Range.closed(min, max), value);
    }
    /**
     * Constructs a new IntParameter.
     * @param name the parameter's name
     * @param range the parameter's range, which must be nonempty and closed
     * at both ends
     * @param value the parameter's value
     */
    public IntParameter(String name, Range<Integer> range, int value) {
        this.name = checkNotNull(Strings.emptyToNull(name));
        checkNotNull(range);
        checkArgument(range.hasLowerBound() && range.lowerBoundType() == BoundType.CLOSED
                && range.hasUpperBound() && range.upperBoundType() == BoundType.CLOSED
                && !range.isEmpty());
        this.range = range;
        checkArgument(range.contains(value));
        this.value = value;
    }
    /**
     * JSON-ifies IntParameters. Public constructor so ServiceLoader can
     * instantiate it.
     */
    protected static final class IntParameterJsonifier implements Jsonifier<IntParameter>, JsonifierFactory {
        public IntParameterJsonifier() {}
        @Override
        public IntParameter fromJson(JsonValue jsonvalue) {
            JsonObject obj = Jsonifiers.checkClassEqual(jsonvalue, IntParameter.class);
            String name = obj.getString("name");
            int min = obj.getInt("min");
            int max = obj.getInt("max");
            int value = obj.getInt("value");
            return new IntParameter(name, min, max, value);
        }
        @Override
        public JsonValue toJson(IntParameter t) {
            return Json.createObjectBuilder()
                    .add("class", Jsonifiers.toJson(IntParameter.class))
                    .add("name", t.getName())
                    .add("min", t.getMin())
                    .add("max", t.getMax())
                    .add("value", t.getValue())
                    //Python-side support
                    .add("__module__", "parameters")
                    .add("__class__", IntParameter.class.getSimpleName())
                    .build();
        }
        @Override
        @SuppressWarnings("unchecked")
        public <T> Jsonifier<T> getJsonifier(Class<T> klass) {
            return (Jsonifier<T>)(klass.equals(IntParameter.class) ? this : null);
        }
    }
    @Override
    public String getName() {
        return name;
    }
    /** Returns the minimum (inclusive) of this parameter's range. */
    public int getMin() {
        return range.lowerEndpoint();
    }
    /** Returns the maximum (inclusive) of this parameter's range. */
    public int getMax() {
        return range.upperEndpoint();
    }
    /** Returns this parameter's (closed, nonempty) range. */
    public Range<Integer> getRange() {
        return range;
    }
    /** Returns this parameter's value, always contained in the range. */
    public int getValue() {
        return value;
    }
    @Override
    public boolean equals(Object obj) {
        //Identity fast-path (was missing).
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        final IntParameter other = (IntParameter)obj;
        if (!Objects.equals(this.name, other.name))
            return false;
        if (!Objects.equals(this.range, other.range))
            return false;
        if (this.value != other.value)
            return false;
        return true;
    }
    @Override
    public int hashCode() {
        //Algorithm kept as-is so existing hash values stay stable.
        int hash = 3;
        hash = 97 * hash + Objects.hashCode(this.name);
        hash = 97 * hash + Objects.hashCode(this.range);
        hash = 97 * hash + this.value;
        return hash;
    }
    @Override
    public String toString() {
        return String.format("[%s: %d in %s]", name, value, range);
    }
}
/**
* A SwitchParameter represents a choice of one of some universe of objects.
* For example, a SwitchParameter<Boolean> is a simple on-off flag, while a
* SwitchParameter<ChannelFactory> represents a choice of factories.
*
* The autotuner assumes there's no numeric relationship between the objects
* in the universe, in contrast to IntParameter, for which it will try to
* fit a model.
*
* The order of a SwitchParameter's universe is relevant for equals() and
* hashCode() and correct operation of the autotuner. (To the autotuner, a
* SwitchParameter is just an integer between 0 and the universe size; if
* the order of the universe changes, the meaning of that integer changes
* and the autotuner will get confused.)
*
* Objects put into SwitchParameters must implements equals() and hashCode()
* for SwitchParameter's equals() and hashCode() methods to work correctly.
* Objects put into SwitchParameters must be immutable.
*
* To the extent possible, the type T should not itself contain type
* parameters. Consider defining a new class or interface that fixes the
* type parameters.
*
* TODO: restrictions required for JSON representation: toString() and
* fromString/valueOf/String ctor, fallback to base64 encoded Serializable,
* etc; List/Set etc. not good unless contains only one type (e.g.,
* List<String> can be handled okay)
* @param <T>
*/
public static final class SwitchParameter<T> implements GenericParameter<Boolean> {
private static final long serialVersionUID = 1L;
private final String name;
/**
* The type of elements in this SwitchParameter.
*/
private final Class<T> type;
/**
* The universe of this SwitchParameter -- must not contain any
* duplicate elements.
*/
private final ImmutableList<T> universe;
/**
* The index of the value in the universe. Note that most of the
* interface prefers to work with Ts rather than values.
*/
private final int value;
/**
* Create a new SwitchParameter with the given type, value, and
* universe. The universe must contain at least one element, contain no
* duplicate elements, and contain the value.
*
* The type must be provided explicitly, rather than being inferred as
* value.getClass(), as value might be of a more-derived type than the
* elements in the universe.
* @param name the name of this parameter
* @param type the type of the universe
* @param value the value of this parameter
* @param universe the universe of possible values of this parameter
*/
public SwitchParameter(String name, Class<T> type, T value, Iterable<? extends T> universe) {
this.name = checkNotNull(Strings.emptyToNull(name));
this.type = checkNotNull(type);
int size = 0;
ImmutableSet.Builder<T> builder = ImmutableSet.builder();
for (T t : universe) {
checkArgument(!ReflectionUtils.usesObjectEquality(t.getClass()), "all objects in universe must have proper equals()/hashCode()");
builder.add(t);
++size;
}
ImmutableSet<T> set = builder.build();
checkArgument(set.size() == size, "universe contains duplicate elements");
//A single element universe is permitted, through not particularly
//useful.
checkArgument(set.size() > 0, "empty universe");
this.universe = set.asList();
this.value = checkElementIndex(this.universe.indexOf(value), this.universe.size(), "value not in universe");
}
/**
* Creates a new SwitchParameter<Boolean> with the given name and value.
* The universe is [false, true].
* @param name the name of this parameter
* @param value the value of this parameter (true or false)
* @return a new SwitchParameter<Boolean> with the given name and value
*/
public static SwitchParameter<Boolean> create(String name, boolean value) {
return new SwitchParameter<>(name, Boolean.class, value, Arrays.asList(false, true));
}
/**
 * Serializes SwitchParameter instances to and from JSON. Registered with
 * the Jsonifiers machinery via the JsonifierFactory interface.
 */
protected static final class SwitchParameterJsonifier implements Jsonifier<SwitchParameter<?>>, JsonifierFactory {
    public SwitchParameterJsonifier() {}
    @Override
    @SuppressWarnings({"unchecked", "rawtypes"})
    public SwitchParameter<?> fromJson(JsonValue jsonvalue) {
        JsonObject obj = Jsonifiers.checkClassEqual(jsonvalue, SwitchParameter.class);
        String name = obj.getString("name");
        Class<?> universeType = Jsonifiers.fromJson(obj.get("universeType"), Class.class);
        ImmutableList<?> universe = ImmutableList.copyOf(Jsonifiers.fromJson(obj.get("universe"), ReflectionUtils.getArrayType(universeType)));
        //We should have caught this in fromJson(v, universeType).
        assert Jsonifiers.notHeapPolluted(universe, universeType);
        // "value" on the wire is an index into the universe list.
        int value = obj.getInt("value");
        return new SwitchParameter(name, universeType, universe.get(value), universe);
    }
    @Override
    public JsonValue toJson(SwitchParameter<?> t) {
        return Json.createObjectBuilder()
                .add("class", Jsonifiers.toJson(SwitchParameter.class))
                .add("name", t.getName())
                .add("universeType", Jsonifiers.toJson(t.type))
                .add("universe", Jsonifiers.toJson(t.universe.toArray()))
                .add("value", t.value)
                //Python-side support
                .add("__module__", "parameters")
                .add("__class__", SwitchParameter.class.getSimpleName())
                .build();
    }
    @Override
    @SuppressWarnings("unchecked")
    public <T> Jsonifier<T> getJsonifier(Class<T> klass) {
        // Factory lookup: this jsonifier only handles SwitchParameter itself.
        return (Jsonifier<T>)(klass.equals(SwitchParameter.class) ? this : null);
    }
}
@Override
public String getName() {
    return name;
}
@Override
public Class<T> getGenericParameter() {
    return type;
}
/**
 * Gets this parameter's value. Internally 'value' is stored as an index
 * into the universe list.
 * @return this parameter's value
 */
public T getValue() {
    return universe.get(value);
}
/**
 * Gets the universe of possible values for this parameter.
 * @return the universe of possible values for this parameter
 */
public ImmutableList<T> getUniverse() {
    return universe;
}
@Override
public boolean equals(Object obj) {
    // Reflexive fast path; note that 'name' is not part of the comparison,
    // consistent with hashCode() below.
    if (this == obj)
        return true;
    if (obj == null || getClass() != obj.getClass())
        return false;
    final SwitchParameter<?> other = (SwitchParameter<?>)obj;
    return Objects.equals(this.type, other.type)
            && Objects.equals(this.universe, other.universe)
            && this.value == other.value;
}
@Override
public int hashCode() {
    // Same 7/47 accumulation over the fields compared in equals(); 'name'
    // is excluded, matching equals().
    int result = 7;
    result = 47 * result + Objects.hashCode(this.type);
    result = 47 * result + Objects.hashCode(this.universe);
    result = 47 * result + this.value;
    return result;
}
@Override
public String toString() {
    // e.g. "[bar: true (index 1) of [false, true]]"
    final T current = getValue();
    return String.format("[%s: %s (index %d) of %s]", name, current, value, universe);
}
}
/**
* A PartitionParameter represents a partitioning of a stream graph
* (workers) into Blobs, the kind of those Blobs, and the mapping of Blobs
* to cores on machines.
* <p/>
* For the purposes of this class, machines are considered distinct, but
* cores on the same machine are not.
*/
public static final class PartitionParameter implements Parameter {
private static final long serialVersionUID = 1L;
private final String name;
/**
* The number of cores on each machine. Always contains at least one
* element and all elements are always >= 1.
*/
private final ImmutableList<Integer> coresPerMachine;
/**
* A list per machine of a list of blobs on that machine. The inner
* lists are sorted.
*/
private final ImmutableList<ImmutableList<BlobSpecifier>> blobs;
/**
* The BlobFactories that can be used to create blobs. This list
* contains no duplicate elements.
*/
private final ImmutableList<BlobFactory> blobFactoryUniverse;
/**
* The maximum identifier of a worker in the stream graph, used during
* deserialization to check that all workers have been assigned to a
* blob.
*/
private final int maxWorkerIdentifier;
/**
* Only called by the builder.
*/
/**
 * Only called by the builder (and the jsonifier), which have already
 * validated the arguments; no defensive checks are repeated here.
 */
private PartitionParameter(String name, ImmutableList<Integer> coresPerMachine, ImmutableList<ImmutableList<BlobSpecifier>> blobs, ImmutableList<BlobFactory> blobFactoryUniverse, int maxWorkerIdentifier) {
    this.name = name;
    this.coresPerMachine = coresPerMachine;
    this.blobs = blobs;
    this.blobFactoryUniverse = blobFactoryUniverse;
    this.maxWorkerIdentifier = maxWorkerIdentifier;
}
/**
 * Builds PartitionParameter instances, enforcing invariants (per-machine
 * core budgets, unique blob factories, each worker assigned to at most
 * one blob) as blobs are added.
 */
public static final class Builder {
    private final String name;
    private final ImmutableList<Integer> coresPerMachine;
    // Remaining unallocated cores per machine; decremented by addBlob().
    private final int[] coresAvailable;
    private final List<BlobFactory> blobFactoryUniverse = new ArrayList<>();
    private final List<List<BlobSpecifier>> blobs = new ArrayList<>();
    // Identifiers of all workers already assigned to some blob.
    private final NavigableSet<Integer> workersInBlobs = new TreeSet<>();
    private Builder(String name, ImmutableList<Integer> coresPerMachine) {
        this.name = name;
        this.coresPerMachine = coresPerMachine;
        this.coresAvailable = Ints.toArray(this.coresPerMachine);
        //You might think we can use Collections.nCopies() here, but
        //that would mean all cores would share the same list!
        for (int i = 0; i < coresPerMachine.size(); ++i)
            blobs.add(new ArrayList<BlobSpecifier>());
    }
    /**
     * Adds a factory to the universe usable by addBlob().
     * @param factory a factory with value-based equals()/hashCode(), not
     * already added
     * @return this builder
     */
    public Builder addBlobFactory(BlobFactory factory) {
        checkArgument(!ReflectionUtils.usesObjectEquality(factory.getClass()), "blob factories must have a proper equals() and hashCode()");
        checkArgument(!blobFactoryUniverse.contains(checkNotNull(factory)), "blob factory already added");
        blobFactoryUniverse.add(factory);
        return this;
    }
    /**
     * Assigns the given workers to a new blob on the given machine.
     * @param machine machine index
     * @param cores cores to allocate; must not exceed the machine's budget
     * @param blobFactory a factory previously registered via addBlobFactory()
     * @param workers the blob's workers; none may already be assigned
     * @return this builder
     */
    public Builder addBlob(int machine, int cores, BlobFactory blobFactory, Set<Worker<?, ?>> workers) {
        checkElementIndex(machine, coresPerMachine.size());
        checkArgument(cores <= coresAvailable[machine],
                "allocating %s cores but only %s available on machine %s",
                cores, coresAvailable[machine], machine);
        checkArgument(blobFactoryUniverse.contains(blobFactory),
                "blob factory %s not in universe %s", blobFactory, blobFactoryUniverse);
        ImmutableSortedSet.Builder<Integer> builder = ImmutableSortedSet.naturalOrder();
        for (Worker<?, ?> worker : workers) {
            int identifier = Workers.getIdentifier(worker);
            checkArgument(identifier >= 0, "uninitialized worker identifier: %s", worker);
            checkArgument(!workersInBlobs.contains(identifier), "worker %s already assigned to blob", worker);
            builder.add(identifier);
        }
        ImmutableSortedSet<Integer> workerIdentifiers = builder.build();
        //Okay, we've checked everything. Commit.
        blobs.get(machine).add(new BlobSpecifier(workerIdentifiers, machine, cores, blobFactory));
        workersInBlobs.addAll(workerIdentifiers);
        coresAvailable[machine] -= cores;
        return this;
    }
    /**
     * Builds the PartitionParameter.
     * @return a new PartitionParameter
     * @throws IllegalStateException if no workers have been assigned yet
     */
    public PartitionParameter build() {
        //workersInBlobs.last() would throw a bare NoSuchElementException
        //on an empty set; fail with a clear message instead.
        if (workersInBlobs.isEmpty())
            throw new IllegalStateException("no blobs added; cannot build an empty partition");
        ImmutableList.Builder<ImmutableList<BlobSpecifier>> blobBuilder = ImmutableList.builder();
        for (List<BlobSpecifier> list : blobs) {
            // Inner per-machine lists are kept sorted (see BlobSpecifier.compareTo).
            Collections.sort(list);
            blobBuilder.add(ImmutableList.copyOf(list));
        }
        return new PartitionParameter(name, coresPerMachine, blobBuilder.build(), ImmutableList.copyOf(blobFactoryUniverse), workersInBlobs.last());
    }
}
/**
 * Returns a Builder for a PartitionParameter with the given name over
 * machines with the given core counts.
 * @param name the parameter name; must be nonempty
 * @param coresPerMachine cores per machine; nonempty, every element >= 1
 * @return a new Builder
 */
public static Builder builder(String name, List<Integer> coresPerMachine) {
    checkArgument(!coresPerMachine.isEmpty());
    for (Integer cores : coresPerMachine)
        checkArgument(checkNotNull(cores) >= 1);
    return new Builder(checkNotNull(Strings.emptyToNull(name)), ImmutableList.copyOf(coresPerMachine));
}
/**
 * Varargs convenience overload of {@link #builder(String, List)}.
 */
public static Builder builder(String name, int... coresPerMachine) {
    return builder(name, Ints.asList(coresPerMachine));
}
/**
* A blob's properties.
*/
public static final class BlobSpecifier implements Comparable<BlobSpecifier> {
/**
* The identifiers of the workers in this blob.
*/
private final ImmutableSortedSet<Integer> workerIdentifiers;
/**
* The index of the machine this blob is on.
*/
private final int machine;
/**
* The number of cores allocated to this blob.
*/
private final int cores;
/**
* The BlobFactory to be used to create this blob.
*/
private final BlobFactory blobFactory;
/**
 * Creates a BlobSpecifier. Only called by Builder.addBlob() (and the
 * jsonifier), which validate the worker identifiers and core budget.
 * @param workerIdentifiers identifiers of the workers in this blob
 * @param machine machine index; must be >= 0
 * @param cores cores allocated to this blob; must be >= 1
 * @param blobFactory the factory used to create this blob
 */
private BlobSpecifier(ImmutableSortedSet<Integer> workerIdentifiers, int machine, int cores, BlobFactory blobFactory) {
    this.workerIdentifiers = workerIdentifiers;
    checkArgument(machine >= 0);
    this.machine = machine;
    checkArgument(cores >= 1, "all blobs must be assigned at least one core");
    this.cores = cores;
    this.blobFactory = blobFactory;
}
/**
 * Serializes BlobSpecifier instances to and from JSON.
 */
protected static final class BlobSpecifierJsonifier implements Jsonifier<BlobSpecifier>, JsonifierFactory {
    public BlobSpecifierJsonifier() {}
    @Override
    public BlobSpecifier fromJson(JsonValue value) {
        //TODO: array serialization, error checking
        JsonObject obj = Jsonifiers.checkClassEqual(value, BlobSpecifier.class);
        int machine = obj.getInt("machine");
        int cores = obj.getInt("cores");
        BlobFactory blobFactory = Jsonifiers.fromJson(obj.get("blobFactory"), BlobFactory.class);
        // Rebuild the sorted worker-identifier set from the JSON array.
        ImmutableSortedSet.Builder<Integer> builder = ImmutableSortedSet.naturalOrder();
        for (JsonValue i : obj.getJsonArray("workerIds"))
            builder.add(Jsonifiers.fromJson(i, Integer.class));
        return new BlobSpecifier(builder.build(), machine, cores, blobFactory);
    }
    @Override
    public JsonValue toJson(BlobSpecifier t) {
        JsonArrayBuilder workerIds = Json.createArrayBuilder();
        for (int i : t.workerIdentifiers)
            workerIds.add(i);
        return Json.createObjectBuilder()
                .add("class", Jsonifiers.toJson(BlobSpecifier.class))
                .add("machine", t.machine)
                .add("cores", t.cores)
                .add("blobFactory", Jsonifiers.toJson(t.blobFactory))
                .add("workerIds", workerIds)
                //Python-side support
                .add("__module__", "configuration")
                .add("__class__", BlobSpecifier.class.getSimpleName())
                .build();
    }
    @Override
    @SuppressWarnings("unchecked")
    public <T> Jsonifier<T> getJsonifier(Class<T> klass) {
        // Factory lookup: this jsonifier only handles BlobSpecifier itself.
        return (Jsonifier<T>)(klass.equals(BlobSpecifier.class) ? this : null);
    }
}
/** Returns the identifiers of the workers in this blob, in sorted order. */
public ImmutableSortedSet<Integer> getWorkerIdentifiers() {
    return workerIdentifiers;
}
/**
 * Resolves this blob's worker identifiers against the given stream graph.
 * @param streamGraph a worker in the graph to resolve against
 * @return the workers of this blob
 * @throws IllegalArgumentException if one of this blob's identifiers has
 * no corresponding worker in the graph
 */
public ImmutableSet<Worker<?, ?>> getWorkers(Worker<?, ?> streamGraph) {
    ImmutableSet<Worker<?, ?>> allWorkers = Workers.getAllWorkersInGraph(streamGraph);
    // Index every worker by its identifier for constant-time lookup below.
    ImmutableMap<Integer, Worker<?, ?>> workersByIdentifier =
            Maps.uniqueIndex(allWorkers, new Function<Worker<?, ?>, Integer>() {
                @Override
                public Integer apply(Worker<?, ?> input) {
                    return Workers.getIdentifier(input);
                }
            });
    ImmutableSet.Builder<Worker<?, ?>> workersInBlob = ImmutableSet.builder();
    for (Integer i : workerIdentifiers) {
        Worker<?, ?> w = workersByIdentifier.get(i);
        if (w == null)
            throw new IllegalArgumentException("Identifier " + i + " not in given stream graph");
        workersInBlob.add(w);
    }
    return workersInBlob.build();
}
/** Returns the index of the machine this blob is on. */
public int getMachine() {
    return machine;
}
/** Returns the number of cores allocated to this blob. */
public int getCores() {
    return cores;
}
/** Returns the BlobFactory to be used to create this blob. */
public BlobFactory getBlobFactory() {
    return blobFactory;
}
@Override
public int hashCode() {
    // Combines exactly the fields compared in equals().
    int hash = 3;
    hash = 37 * hash + Objects.hashCode(this.workerIdentifiers);
    hash = 37 * hash + this.machine;
    hash = 37 * hash + this.cores;
    hash = 37 * hash + Objects.hashCode(this.blobFactory);
    return hash;
}
@Override
public boolean equals(Object obj) {
    // Reflexive fast path, then field-by-field comparison.
    if (this == obj)
        return true;
    if (obj == null || getClass() != obj.getClass())
        return false;
    final BlobSpecifier other = (BlobSpecifier)obj;
    return Objects.equals(this.workerIdentifiers, other.workerIdentifiers)
            && this.machine == other.machine
            && this.cores == other.cores
            && Objects.equals(this.blobFactory, other.blobFactory);
}
@Override
public int compareTo(BlobSpecifier o) {
    //Worker identifiers are unique within the stream graph, so
    //we can base our comparison on them.
    // NOTE(review): first() throws NoSuchElementException for a blob with
    // no workers, and this ordering is inconsistent with equals() when two
    // specifiers share a first identifier — confirm blobs are always
    // nonempty and workers never shared (addBlob appears to enforce both).
    return workerIdentifiers.first().compareTo(o.workerIdentifiers.first());
}
}
/**
 * Serializes PartitionParameter instances to and from JSON.
 */
protected static final class PartitionParameterJsonifier implements Jsonifier<PartitionParameter>, JsonifierFactory {
    public PartitionParameterJsonifier() {}
    @Override
    public PartitionParameter fromJson(JsonValue value) {
        //TODO: array serialization, error checking
        JsonObject obj = Jsonifiers.checkClassEqual(value, PartitionParameter.class);
        String name = obj.getString("name");
        int maxWorkerIdentifier = obj.getInt("maxWorkerIdentifier");
        ImmutableList.Builder<Integer> coresPerMachineBuilder = ImmutableList.builder();
        for (JsonValue v : obj.getJsonArray("coresPerMachine"))
            coresPerMachineBuilder.add(Jsonifiers.fromJson(v, Integer.class));
        //Build once and reuse; the previous code rebuilt this list on every
        //evaluation of the loop condition below.
        ImmutableList<Integer> coresPerMachine = coresPerMachineBuilder.build();
        ImmutableList.Builder<BlobFactory> blobFactoryUniverse = ImmutableList.builder();
        for (JsonValue v : obj.getJsonArray("blobFactoryUniverse"))
            blobFactoryUniverse.add(Jsonifiers.fromJson(v, BlobFactory.class));
        //One (initially empty) list of blobs per machine.
        List<List<BlobSpecifier>> mBlobs = new ArrayList<>();
        for (int i = 0; i < coresPerMachine.size(); ++i)
            mBlobs.add(new ArrayList<BlobSpecifier>());
        for (JsonValue v : obj.getJsonArray("blobs")) {
            BlobSpecifier bs = Jsonifiers.fromJson(v, BlobSpecifier.class);
            mBlobs.get(bs.getMachine()).add(bs);
        }
        ImmutableList.Builder<ImmutableList<BlobSpecifier>> blobs = ImmutableList.builder();
        for (List<BlobSpecifier> m : mBlobs)
            blobs.add(ImmutableList.copyOf(m));
        return new PartitionParameter(name, coresPerMachine, blobs.build(), blobFactoryUniverse.build(), maxWorkerIdentifier);
    }
    @Override
    public JsonValue toJson(PartitionParameter t) {
        JsonArrayBuilder coresPerMachine = Json.createArrayBuilder();
        for (int i : t.coresPerMachine)
            coresPerMachine.add(i);
        JsonArrayBuilder blobFactoryUniverse = Json.createArrayBuilder();
        for (BlobFactory factory : t.blobFactoryUniverse)
            blobFactoryUniverse.add(Jsonifiers.toJson(factory));
        //Blobs are flattened machine-by-machine; fromJson() regroups them
        //using each blob's own machine index.
        JsonArrayBuilder blobs = Json.createArrayBuilder();
        for (List<BlobSpecifier> machine : t.blobs)
            for (BlobSpecifier blob : machine)
                blobs.add(Jsonifiers.toJson(blob));
        return Json.createObjectBuilder()
                .add("class", Jsonifiers.toJson(PartitionParameter.class))
                .add("name", t.getName())
                .add("maxWorkerIdentifier", t.maxWorkerIdentifier)
                .add("coresPerMachine", coresPerMachine)
                .add("blobFactoryUniverse", blobFactoryUniverse)
                .add("blobs", blobs)
                //Python-side support
                .add("__module__", "parameters")
                .add("__class__", PartitionParameter.class.getSimpleName())
                .build();
    }
    @Override
    @SuppressWarnings("unchecked")
    public <T> Jsonifier<T> getJsonifier(Class<T> klass) {
        //Factory lookup: this jsonifier only handles PartitionParameter.
        return (Jsonifier<T>)(klass.equals(PartitionParameter.class) ? this : null);
    }
}
@Override
public String getName() {
    return name;
}
/** Returns the number of machines in this partitioning. */
public int getMachineCount() {
    return coresPerMachine.size();
}
/** Returns the number of cores on the given machine. */
public int getCoresOnMachine(int machine) {
    return coresPerMachine.get(machine);
}
/** Returns the (sorted) blobs assigned to the given machine. */
public ImmutableList<BlobSpecifier> getBlobsOnMachine(int machine) {
    return blobs.get(machine);
}
/** Returns the universe of usable blob factories (contains no duplicates). */
public ImmutableList<BlobFactory> getBlobFactories() {
    return blobFactoryUniverse;
}
@Override
public boolean equals(Object obj) {
    // Reflexive fast path, then field-by-field comparison.
    if (this == obj)
        return true;
    if (obj == null || getClass() != obj.getClass())
        return false;
    final PartitionParameter other = (PartitionParameter)obj;
    return Objects.equals(this.name, other.name)
            && Objects.equals(this.coresPerMachine, other.coresPerMachine)
            && Objects.equals(this.blobs, other.blobs)
            && Objects.equals(this.blobFactoryUniverse, other.blobFactoryUniverse)
            && this.maxWorkerIdentifier == other.maxWorkerIdentifier;
}
@Override
public int hashCode() {
    // Combines exactly the fields compared in equals().
    int hash = 3;
    hash = 61 * hash + Objects.hashCode(this.name);
    hash = 61 * hash + Objects.hashCode(this.coresPerMachine);
    hash = 61 * hash + Objects.hashCode(this.blobs);
    hash = 61 * hash + Objects.hashCode(this.blobFactoryUniverse);
    hash = 61 * hash + this.maxWorkerIdentifier;
    return hash;
}
}
/**
 * Smoke test: builds a small Configuration (an int parameter, a switch
 * parameter, and a partition parameter over a two-worker pipeline),
 * round-trips it through JSON, and prints each stage so the two JSON
 * strings can be compared by eye.
 */
public static void main(String[] args) {
    Configuration.Builder builder = Configuration.builder();
    builder.addParameter(new IntParameter("foo", 0, 10, 8));
    builder.addParameter(SwitchParameter.create("bar", true));
    // Two-stage identity pipeline; the visitor wires the workers together
    // and assigns their identifiers.
    Identity<Integer> first = new Identity<>(), second = new Identity<>();
    Pipeline<Integer, Integer> pipeline = new Pipeline<>(first, second);
    ConnectWorkersVisitor cwv = new ConnectWorkersVisitor();
    pipeline.visit(cwv);
    // Two machines with one core each; one blob per machine.
    PartitionParameter.Builder partParam = PartitionParameter.builder("part", 1, 1);
    BlobFactory factory = new Interpreter.InterpreterBlobFactory();
    partParam.addBlobFactory(factory);
    partParam.addBlob(0, 1, factory, Collections.<Worker<?, ?>>singleton(first));
    partParam.addBlob(1, 1, factory, Collections.<Worker<?, ?>>singleton(second));
    builder.addParameter(partParam.build());
    Configuration cfg1 = builder.build();
    String json = Jsonifiers.toJson(cfg1).toString();
    System.out.println(json);
    // Round-trip: deserialize and re-serialize; json2 should match json.
    Configuration cfg2 = Jsonifiers.fromJson(json, Configuration.class);
    System.out.println(cfg2);
    String json2 = Jsonifiers.toJson(cfg2).toString();
    System.out.println(json2);
}
} |
package edu.nyu.cs.cs2580;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Scanner;
import java.util.Vector;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import org.jsoup.Jsoup;
import edu.nyu.cs.cs2580.SearchEngine.Options;
import edu.nyu.cs.cs2580.utils.PersistentStoreManager;
import edu.nyu.cs.cs2580.utils.PersistentStoreManager.IvtMapByte;
import edu.nyu.cs.cs2580.utils.PersistentStoreManager.TermFrequencyManager;
/**
* @CS2580: Implement this class for HW2.
*/
public class IndexerInvertedCompressed extends Indexer {
private List<IvtMapByte> ivtIndexMapList = new ArrayList<IvtMapByte>();
private Map<Integer, DocumentIndexed> docMap = null;
private Map<String, Integer> docUrlMap = null;
private Map<String, Object> infoMap = null;
// Table name for index of documents.
private static final String DOC_IDX_TBL = "docDB";
private static final String DOC_INFO_TBL = "docInfoDB";
private static final String DOC_URL_TBL = "docUrlDB";
private TermFrequencyManager tfm;
/**
 * Recursively collects every regular file under the given folder.
 * @param folder the directory to walk
 * @return all files beneath folder, in listing order; empty if the folder
 * cannot be listed
 */
private List<File> getAllFiles(final File folder) {
    List<File> fileList = new LinkedList<File>();
    // listFiles() returns null (not an empty array) when the path is not a
    // directory or an I/O error occurs; guard against the NPE.
    File[] entries = folder.listFiles();
    if (entries == null) {
        return fileList;
    }
    for (final File fileEntry : entries) {
        if (fileEntry.isDirectory()) {
            fileList.addAll(getAllFiles(fileEntry));
        } else {
            fileList.add(fileEntry);
        }
    }
    return fileList;
}
/**
 * Runnable that indexes files[startFileIdx, endFileIdx): parses each file
 * as HTML with Jsoup, stems and position-tags every token, records
 * per-document term frequencies, and appends byte-compressed
 * <docid, position> postings into the shared ivtMap.
 */
private class InvertIndexBuildingTask implements Runnable {
    private List<File> files;
    private int startFileIdx;
    private int endFileIdx;
    // Shared across tasks; NOTE(review): mutated without synchronization,
    // which is only safe while constructIndex() uses a single thread —
    // confirm before raising threadCount.
    private Map<String, List<Byte>> ivtMap;
    // Total number of tokens indexed by this task so far.
    private long termCount = 0;
    public InvertIndexBuildingTask(List<File> files, int startFileIdx,
            int endFileIdx, Map<String, List<Byte>> ivtMap) {
        this.files = files;
        this.startFileIdx = startFileIdx;
        this.endFileIdx = endFileIdx;
        this.ivtMap = ivtMap;
    }
    public long getTermCount() {
        return termCount;
    }
    @Override
    public void run() {
        System.out.println("Thread " + Thread.currentThread().getName()
                + " processes files from " + startFileIdx + " to "
                + endFileIdx);
        for (int docId = startFileIdx; docId < endFileIdx; docId++) {
            File file = files.get(docId);
            // token -> positions at which the token occurs in this document
            Map<String, List<Integer>> ivtMapItem = new HashMap<String, List<Integer>>();
            // token -> occurrence count within this document ("ferq" = freq)
            Map<String, Integer> ferqMap = new HashMap<String, Integer>();
            org.jsoup.nodes.Document doc;
            try {
                doc = Jsoup.parse(file, "UTF-8");
            } catch (IOException e1) {
                // Unreadable file: skip it; its docId is simply left unused.
                continue;
            }
            String title = doc.title();
            String text = doc.text();
            Stemmer s = new Stemmer();
            Scanner scanner = new Scanner(text);
            int passageLength = 0;
            while (scanner.hasNext()) {
                // Lowercase and Porter-stem each whitespace token.
                String token = scanner.next().toLowerCase();
                s.add(token.toCharArray(), token.length());
                s.stem();
                token = s.toString();
                // Drop empty and over-long (>20 chars) tokens.
                if (token.length() < 1 || token.length() > 20) {
                    continue;
                }
                if (!ferqMap.containsKey(token)) {
                    ferqMap.put(token, 0);
                }
                ferqMap.put(token, ferqMap.get(token) + 1);
                if (!ivtMapItem.containsKey(token)) {
                    ArrayList<Integer> occList = new ArrayList<Integer>();
                    ivtMapItem.put(token, occList);
                }
                List<Integer> occList = ivtMapItem.get(token);
                occList.add(passageLength);
                ivtMapItem.put(token, occList);
                passageLength++;
            }
            termCount += passageLength;
            tfm.addTermFrequencyForDoc(docId, ferqMap);
            String url = file.getName();
            DocumentIndexed di = new DocumentIndexed(docId);
            di.setTitle(title);
            di.setUrl(url);
            di.setLength(passageLength);
            // for each token in each document, add to the map of <token,
            // {docid, occ}>
            for (String token : ivtMapItem.keySet()) {
                if (!ivtMap.containsKey(token)) {
                    ivtMap.put(token, new ArrayList<Byte>());
                }
                List<Byte> recordList = ivtMap.get(token);
                List<Integer> occList = ivtMapItem.get(token);
                List<Byte> _docId = IndexerInvertedCompressed
                        .compressInt(docId); // get the compressed id
                // sequentially add <docid, occurrence> to the posting list.
                for (int e : occList) {
                    recordList.addAll(_docId);
                    ArrayList<Byte> _occ = compressInt(e);
                    recordList.addAll(_occ);
                }
            }
            buildDocumentIndex(di);
        }
    }
}
/**
 * Creates an indexer configured by the given command-line options.
 */
public IndexerInvertedCompressed(Options options) {
    super(options);
    System.out.println("Using Indexer: " + this.getClass().getSimpleName());
}
/**
 * Builds the compressed inverted index from _options._corpusPrefix.
 * Files are processed in fixed-size batches, each batch being committed
 * to its own on-disk segment ("ivt0", "ivt1", ...) to bound memory use;
 * finally PageRank/numviews signals are attached and the document tables
 * are persisted via storeVariables().
 */
@Override
public void constructIndex() throws IOException {
    String corpusFolder = _options._corpusPrefix;
    System.out.println("Construct index from: " + corpusFolder);
    long start_t = System.currentTimeMillis();
    cleanUpDirectory();
    // Get all corpus files.
    List<File> files = getAllFiles(new File(corpusFolder));
    int filesPerBatch = 1750;
    int threadCount = 1;
    System.out.println("Start building index with " + threadCount
            + " threads. Elapsed: "
            + (System.currentTimeMillis() - start_t) / 1000.0 + "s");
    infoMap = new HashMap<String, Object>();
    docMap = new HashMap<Integer, DocumentIndexed>();
    docUrlMap = new HashMap<String, Integer>();
    infoMap.put("_numDocs", files.size());
    long termCount = 0;
    tfm = new TermFrequencyManager(_options._indexPrefix);
    for (int batchNum = 0; batchNum < files.size() / filesPerBatch + 1; batchNum++) {
        int fileIdStart = batchNum * filesPerBatch;
        int fileIdEnd = (batchNum + 1) * filesPerBatch;
        if (fileIdEnd > files.size()) {
            fileIdEnd = files.size();
        }
        System.out.println("Processing files from " + fileIdStart + " to "
                + fileIdEnd);
        ExecutorService threadPool = Executors
                .newFixedThreadPool(threadCount);
        // One on-disk segment ("ivt<batchNum>") per batch.
        IvtMapByte ivtMapFile = new IvtMapByte(new File(
                _options._indexPrefix), "ivt" + batchNum, true);
        Map<String, List<Byte>> ivtMap = new HashMap<String, List<Byte>>();
        List<InvertIndexBuildingTask> taskList = new ArrayList<InvertIndexBuildingTask>();
        int totalFileCount = fileIdEnd - fileIdStart;
        int filesPerThread = totalFileCount / threadCount;
        for (int threadId = 0; threadId < threadCount; threadId++) {
            int startFileIdx = threadId * filesPerThread + fileIdStart;
            int endFileIdx = (threadId + 1) * filesPerThread + fileIdStart;
            // The last thread also absorbs the division remainder.
            if (threadId == threadCount - 1) {
                endFileIdx = fileIdEnd;
            }
            InvertIndexBuildingTask iibt = new InvertIndexBuildingTask(
                    files, startFileIdx, endFileIdx, ivtMap);
            threadPool.submit(iibt);
            taskList.add(iibt);
        }
        threadPool.shutdown();
        try {
            threadPool.awaitTermination(Long.MAX_VALUE, TimeUnit.SECONDS);
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
        // Combine all posting lists for N threads.
        for (InvertIndexBuildingTask iibt : taskList) {
            termCount += iibt.getTermCount();
        }
        System.out.println(fileIdEnd
                + " pages have been processed. Elapsed: "
                + (System.currentTimeMillis() - start_t) / 1000.0 + "s");
        System.out.println("Writing Inverted Map to disk. " + fileIdEnd
                + " pages have been processed. Elapsed: "
                + (System.currentTimeMillis() - start_t) / 1000.0 + "s");
        ivtMapFile.putAll(ivtMap);
        ivtMapFile.close();
        System.out.println("Batch commit done. Elapsed: "
                + (System.currentTimeMillis() - start_t) / 1000.0 + "s");
    }
    infoMap.put("_totalTermFrequency", termCount);
    tfm.close();
    // Attach PageRank and numviews signals to every indexed document.
    CorpusAnalyzer ca = new CorpusAnalyzerPagerank(_options);
    LogMiner lm = new LogMinerNumviews(_options);
    // Unchecked casts: load() is assumed to return String->Double maps.
    Map<String, Double> pageRankMap = (Map<String, Double>) ca.load();
    Map<String, Double> numViewsMap = (Map<String, Double>) lm.load();
    for(Map.Entry<Integer, DocumentIndexed> die : docMap.entrySet()) {
        int docid = die.getKey();
        DocumentIndexed di = die.getValue();
        String basename = di.getUrl();
        // NOTE(review): this NPEs if a basename is missing from either map —
        // confirm both loaders cover the whole corpus.
        di.setPageRank((float)(double)pageRankMap.get(basename));
        di.setNumViews((int)(double)numViewsMap.get(basename));
        //System.out.println(basename + " " + pageRankMap.get(basename) + " "
        //  + numViewsMap.get(basename));
    }
    storeVariables();
    long end_t = System.currentTimeMillis();
    System.out.println("Construct done. Duration: " + (end_t - start_t)
            / 1000.0 + "s");
}
/**
 * Persists the document table, url table and corpus statistics to their
 * fixed files under the index directory.
 */
private void storeVariables() {
    final String prefix = this._options._indexPrefix;
    PersistentStoreManager.writeObjectToFile(new File(prefix, DOC_IDX_TBL), docMap);
    PersistentStoreManager.writeObjectToFile(new File(prefix, DOC_URL_TBL), docUrlMap);
    PersistentStoreManager.writeObjectToFile(new File(prefix, DOC_INFO_TBL), infoMap);
}
/**
 * Restores docMap, docUrlMap and the corpus statistics previously written
 * by storeVariables().
 */
private void readVariables() {
    File docMapFile = new File(this._options._indexPrefix, DOC_IDX_TBL);
    File docUrlFile = new File(this._options._indexPrefix, DOC_URL_TBL);
    File docInfoFile = new File(this._options._indexPrefix, DOC_INFO_TBL);
    // Unchecked casts: the files are assumed to contain exactly what
    // storeVariables() wrote.
    docMap = (Map<Integer, DocumentIndexed>) PersistentStoreManager
            .readObjectFromFile(docMapFile);
    docUrlMap = (Map<String, Integer>) PersistentStoreManager
            .readObjectFromFile(docUrlFile);
    infoMap = (Map<String, Object>) PersistentStoreManager
            .readObjectFromFile(docInfoFile);
    _totalTermFrequency = (Long) infoMap.get("_totalTermFrequency");
    _numDocs = (Integer) infoMap.get("_numDocs");
}
/**
 * Ensures the index directory exists and removes every file directly
 * inside it (subdirectories themselves are not recursed into).
 */
private void cleanUpDirectory() {
    File dir = new File(_options._indexPrefix);
    dir.mkdirs();
    // listFiles() returns null if the path is not a directory (e.g. when
    // mkdirs() failed) or an I/O error occurs; guard against the NPE.
    File[] entries = dir.listFiles();
    if (entries == null) {
        return;
    }
    for (File file : entries) {
        file.delete();
    }
}
/**
 * Records a finished document in the docid and url lookup tables.
 * Synchronized because indexing tasks submitted to the thread pool call
 * it concurrently.
 */
synchronized private void buildDocumentIndex(DocumentIndexed di) {
    docMap.put(di._docid, di);
    docUrlMap.put(di.getUrl(), di._docid);
}
/**
 * Opens the previously constructed index: the term-frequency store, every
 * on-disk posting segment, and the persisted document tables.
 */
@Override
public void loadIndex() throws IOException, ClassNotFoundException {
    tfm = new TermFrequencyManager(_options._indexPrefix);
    // Open segments ivt0, ivt1, ... until the first gap (100 max, matching
    // the batch scheme used by constructIndex()).
    for (int i = 0; i < 100; i++) {
        File file = new File(_options._indexPrefix, "ivt" + i);
        if (!file.exists()) {
            break;
        }
        ivtIndexMapList.add(new IvtMapByte(new File(_options._indexPrefix),
                "ivt" + i, false));
    }
    readVariables();
}
/**
 * Returns the indexed document with the given docid, or null if unknown.
 */
@Override
public Document getDoc(int docid) {
    return docMap.get(docid);
}
/**
* In HW2, you should be using {@link DocumentIndexed}
*/
private String previousQuery=new String();
private int previousDocid=-1;
private Vector<Vector<Integer>> cachePos=new Vector<Vector<Integer>> ();
/**
 * Returns the smallest docid strictly greater than docId in the given
 * decompressed posting list (alternating <docid, position> entries), or
 * -1 if there is none. Scanning resumes from the cached flat index for
 * (phraseIndex, termIndex) and advances that cache as a side effect, so
 * calls must move forward through the list.
 */
private int nextInOccurence(int docId, List<Integer> postinglist, int phraseIndex, int termIndex) {
    int start = cachePos.get(phraseIndex).get(termIndex);
    for (int i = start; i < postinglist.size(); i += 2) {
        if (postinglist.get(i) > docId) {
            cachePos.get(phraseIndex).set(termIndex, i);
            return postinglist.get(i);
        }
    }
    // Exhausted: park the cursor at the end so later calls fail fast.
    cachePos.get(phraseIndex).set(termIndex, postinglist.size());
    return -1;
}
/**
 * Returns the smallest docid strictly greater than docId that contains
 * every term of phrase phraseIndex (conjunction over the phrase's
 * posting lists), or -1 if there is none.
 */
private int nextForOccurence(int docId, Vector<List<Integer>> postinglists, int phraseIndex) {
    // System.out.println("current id is: "+docId);
    int previousVal = -1;
    boolean equilibrium = true;
    int maximum = Integer.MIN_VALUE;
    for (int i = 0; i < postinglists.size(); i++) {
        int currentId = nextInOccurence(docId, postinglists.get(i), phraseIndex, i);
        if (currentId < 0)
            return -1;
        if (previousVal < 0) {
            previousVal = currentId;
            maximum = currentId;
        }
        else {
            if (previousVal != currentId) {
                // Terms disagree; remember the furthest candidate.
                equilibrium = false;
                maximum = Math.max(maximum, currentId);
            }
        }
    }
    if (equilibrium == true)
        // Every term landed on the same document.
        return previousVal;
    else
        // Re-align all terms at the furthest candidate.
        return nextForOccurence(maximum - 1, postinglists, phraseIndex);
}
/**
 * Returns the smallest in-document position strictly greater than pos at
 * which the term occurs within document docId, or -1 if none. The posting
 * list alternates <docid, position>; scanning resumes from the cached
 * cursor for (phrasePos, termPos), which is advanced as a side effect.
 */
private int nextPos(List<Integer> postinglist, int docId, int pos, int phrasePos, int termPos) {
    int docPosition = -1;
    int start = cachePos.get(phrasePos).get(termPos);
    // Locate the first entry for docId at or after the cached cursor.
    for (int i = start; i < postinglist.size(); i += 2) {
        if (postinglist.get(i) == docId) {
            docPosition = i;
            cachePos.get(phrasePos).set(termPos, i);
            break;
        }
    }
    if (docPosition == -1) {
        cachePos.get(phrasePos).set(termPos, postinglist.size());
        return -1;
    }
    // Walk this document's pairs looking for a position beyond pos;
    // get(Pos - 1) is the docid field, get(Pos) the position field.
    int Pos = docPosition + 1;
    while (Pos < postinglist.size() && postinglist.get(Pos - 1) == docId) {
        if (postinglist.get(Pos) > pos) {
            cachePos.get(phrasePos).set(termPos, Pos - 1);
            return postinglist.get(Pos);
        }
        Pos += 2;
    }
    cachePos.get(phrasePos).set(termPos, postinglist.size());
    return -1;
}
/**
 * Returns the first position > pos in document docId where all terms of
 * phrase phrasePos occur consecutively (term k at position p + k), or -1
 * if the phrase does not occur after pos in this document. The returned
 * value is the first term's position.
 */
private int nextPhrase(int docId, int pos, Vector<List<Integer>> postinglists, int phrasePos) {
    int[] positions = new int[postinglists.size()];
    boolean success = true;
    // Next occurrence of each term after pos; any miss means no phrase.
    for (int i = 0; i < positions.length; i++)
    {
        positions[i] = nextPos(postinglists.get(i), docId, pos, phrasePos, i);
        if (positions[i] < 0)
            return -1;
    }
    // int maximum=positions[0];
    // The terms form a phrase only when their positions are consecutive.
    for (int i = 1; i < positions.length; i++) {
        if (positions[i] != positions[i - 1] + 1)
            success = false;
        // if (positions[i]>maximum)
        // maximum=positions[i];
    }
    if (success == true)
        return positions[0];
    else
        // Not consecutive: retry from the first term's new position.
        return nextPhrase(docId, positions[0], postinglists, phrasePos);
}
/**
 * Returns the smallest docid strictly greater than docId whose document
 * contains phrase i as consecutive tokens, or -1 if there is none.
 */
private int nextPhrase(int docId, Vector<List<Integer>> postinglists, int i) {
    // First find a candidate document containing all terms of the phrase.
    int docVerify = nextForOccurence(docId, postinglists, i);
    if (docVerify < 0)
        return -1;
    // Then verify the terms appear consecutively somewhere in it. A phrase
    // starting at position 0 (the document's first token) is a valid
    // match, so test >= 0; the old "> 0" test wrongly skipped documents
    // whose only phrase occurrence began at position 0.
    int result = nextPhrase(docVerify, -1, postinglists, i);
    if (result >= 0)
        return docVerify;
    // No consecutive occurrence here; continue from this document.
    return nextPhrase(docVerify, postinglists, i);
}
/**
 * Returns the smallest docid strictly greater than docId containing every
 * phrase of the query (conjunctive AND across phrases), or -1 if none.
 */
private int next(int docId, Vector<Vector<List<Integer>>> postinglists) {
    // System.out.println("current id is: "+docId);
    int previousVal = -1;
    boolean equilibrium = true;
    int maximum = Integer.MIN_VALUE;
    for (int i = 0; i < postinglists.size(); i++) {
        int currentId = nextPhrase(docId, postinglists.get(i), i);
        if (currentId < 0)
            return -1;
        if (previousVal < 0) {
            previousVal = currentId;
            maximum = currentId;
        }
        else {
            if (previousVal != currentId) {
                // Phrases disagree; remember the furthest candidate.
                equilibrium = false;
                maximum = Math.max(maximum, currentId);
            }
        }
    }
    if (equilibrium == true)
        // Every phrase matched the same document.
        return previousVal;
    else
        // Re-align all phrases at the furthest candidate.
        return next(maximum - 1, postinglists);
}
/**
 * Returns true if the cached per-term posting-list cursors can be reused
 * for this call: the query text must be unchanged and the requested docid
 * must be advancing past the previously returned one (cursors only move
 * forward). The old version printed a copy-pasted "flag1" debug line in
 * both failure branches; the debug output has been removed.
 */
private boolean canUseCache(Query query, int docid) {
    if (!query._query.equals(previousQuery)) {
        return false;
    }
    return docid > previousDocid;
}
/**
 * Returns the first document with id greater than docid that matches the
 * conjunctive query (each token may be a quoted multi-word phrase, split
 * on spaces), or null if there is none. Per-term posting-list cursors are
 * cached across calls so forward iteration over the same query is cheap.
 */
@Override
public Document nextDoc(Query query, int docid) {
    Vector<String> tokens = query._tokens;
    int result = -1;
    // postingLists[phrase][term] = decompressed <docid, position> list.
    Vector<Vector<List<Integer>>> postingLists = new Vector<Vector<List<Integer>>>();
    for (int i = 0; i < tokens.size(); i++) {
        Vector<List<Integer>> container = new Vector<List<Integer>>();
        String[] consecutiveWords = tokens.get(i).split(" ");
        for (int j = 0; j < consecutiveWords.length; j++) {
            // Stem each word the same way the index was built.
            Stemmer s = new Stemmer();
            s.add(consecutiveWords[j].toLowerCase().toCharArray(),
                    consecutiveWords[j].length());
            s.stem();
            container.add(decompressArray(ivtGet(s.toString())));
        }
        // System.out.println("size is: "+docInvertedMap.get(s.toString()).size());
        postingLists.add(container);
    }
    if (canUseCache(query, docid) == false)
    {
        // New query or backwards seek: reset every cached cursor to 0.
        previousQuery = query._query;
        previousDocid = -1;
        cachePos = new Vector<Vector<Integer>>();
        for (int i = 0; i < postingLists.size(); i++) {
            Vector<Integer> tempVec = new Vector<Integer>();
            int size = postingLists.get(i).size();
            for (int j = 0; j < size; j++)
                tempVec.add(0);
            cachePos.add(tempVec);
        }
    }
    result = next(docid, postingLists);
    // Remember progress so the next forward call can reuse the cursors.
    previousDocid = result - 1;
    if (result < 0)
        return null;
    else
        return getDoc(result);
}
/**
 * Returns the number of documents in which {@code term} appears, over the
 * full corpus. The posting list is a byte-compressed sequence of
 * <docid, occurrence> varint pairs (high bit set marks the final byte of
 * each value); distinct consecutive docid encodings are counted.
 */
@Override
public int corpusDocFrequencyByTerm(String term) {
    // Stem given term, matching how the index was built.
    Stemmer s = new Stemmer();
    s.add(term.toLowerCase().toCharArray(), term.length());
    s.stem();
    if (!ivtContainsKey(s.toString())) {
        return 0;
    }
    // Get posting list from index.
    List<Byte> l = ivtGet(s.toString());
    int count = 0;
    ArrayList<Byte> last_code = new ArrayList<Byte>();
    for (int i = 0; i < l.size();) {
        // Collect all bytes of the docid. The old loop read l.get(i) twice
        // without advancing (currByte = l.get(i) then l.get(i++)), which
        // duplicated the first byte and desynchronized the scan for
        // multi-byte docids; collect-and-advance in lockstep instead.
        ArrayList<Byte> code = new ArrayList<Byte>();
        while ((l.get(i) & 0x80) == (byte) 0) {
            code.add(l.get(i));
            i++;
        }
        code.add(l.get(i));
        i++;
        // A new docid encoding means one more matching document.
        if (!last_code.equals(code)) {
            last_code = code;
            ++count;
        }
        // Skip the occurrence varint that follows the docid.
        while ((l.get(i) & 0x80) == (byte) 0) {
            i++;
        }
        i++;
    }
    return count;
}
/**
 * Returns the total number of times {@code term} occurs in the corpus.
 */
@Override
public int corpusTermFrequency(String term) {
    // Number of times {@code term} appeared in corpus.
    // Stem given term.
    Stemmer s = new Stemmer();
    s.add(term.toLowerCase().toCharArray(), term.length());
    s.stem();
    if (!ivtContainsKey(s.toString())) {
        return 0;
    }
    // Get posting list from index.
    List<Byte> l = ivtGet(s.toString());
    int result = 0;
    // Count every varint in the list (a byte with the high bit set ends
    // each one) ...
    for (int i = 0; i < l.size();) {
        while ((l.get(i) & 0x80) == (byte) 0) {
            i++;
        }
        i++;
        result++;
    }
    // ... the list holds <docid, occurrence> pairs, so halve the count.
    return result / 2;
}
// do linear search of a docid for compressed posting list, first occurrence
private int linearSearchPostDecompressed(final int docId, final List<Integer> list) {
    // Entries are <docid, occurrence> pairs, so step two at a time and
    // return the index of the first matching docid field.
    for (int idx = 0; idx < list.size(); idx += 2) {
        if (list.get(idx) == docId) {
            return idx;
        }
    }
    return -1;
}
/**
 * Returns the number of times {@code term} occurs in document docid.
 */
@Override
public int documentTermFrequency(String term, int docid) {
    // Stem given term.
    Stemmer s = new Stemmer();
    s.add(term.toLowerCase().toCharArray(), term.length());
    s.stem();
    if (!ivtContainsKey(s.toString())) {
        return 0;
    }
    // Get posting list from index.
    List<Byte> l = ivtGet(s.toString());
    ArrayList<Integer> arr = decompressArray(l);
    // Use binary search looking for docid within given posting list.
    // (Currently a linear scan despite the name — see helper above.)
    int pos = linearSearchPostDecompressed(docid, arr);
    if (pos != -1) {
        // Return term frequency for given doc and term
        int count = 0;
        // Pairs for one document are contiguous; count while the docid
        // field still matches.
        while (pos < arr.size() - 1 && arr.get(pos) == docid) {
            ++count;
            pos += 2;
        }
        return count;
    } else {
        return 0;
    }
}
// True when at least one shard of the inverted index holds a posting list
// for the given (already stemmed) key.
private boolean ivtContainsKey(String key) {
    boolean found = false;
    for (Map<String, List<Byte>> shard : ivtIndexMapList) {
        if (shard.containsKey(key)) {
            found = true;
            break;
        }
    }
    return found;
}
// Tiny memo (at most two entries) of recently assembled posting lists, keyed
// by stemmed term.
private Map<String, List<Byte>> cache = new HashMap<String, List<Byte>>();

// Assemble the full posting list for {@code key} by concatenating the pieces
// stored in every index shard, memoizing the result in the two-entry cache.
private List<Byte> ivtGet(String key) {
    if (cache.containsKey(key)) {
        return cache.get(key);
    }
    // Cap the cache at two entries: evict an arbitrary one before inserting.
    if (cache.size() > 1) {
        cache.remove(cache.keySet().toArray()[0]);
    }
    List<Byte> merged = new ArrayList<Byte>();
    for (Map<String, List<Byte>> shard : ivtIndexMapList) {
        if (shard.containsKey(key)) {
            merged.addAll(shard.get(key));
        }
    }
    // An absent key caches (and returns) an empty list rather than null.
    cache.put(key, merged);
    return merged;
}
// Decode one v-byte code into the int it represents. Bytes are big-endian
// 7-bit groups; only the final byte carries the terminator (high) bit, and
// masking with 0x7f strips that flag so it does not affect the value.
static public int decompressBytes(ArrayList<Byte> code) {
    int value = 0;
    for (int i = 0; i < code.size(); ++i) {
        value = (value << 7) | (code.get(i) & 0x7f);
    }
    return value;
}
// Decode a whole compressed posting list into its sequence of ints.
// Returns null (after logging) when the stream ends in the middle of a code,
// i.e. the final byte lacks its terminator (high) bit.
static public ArrayList<Integer> decompressArray(List<Byte> list) {
    ArrayList<Integer> decoded = new ArrayList<Integer>();
    int pos = 0;
    while (pos < list.size()) {
        // Gather one code: continuation bytes first, then the terminator byte.
        ArrayList<Byte> code = new ArrayList<Byte>();
        byte b = list.get(pos);
        while ((b & 0x80) == 0) {
            code.add(b);
            pos++;
            if (pos >= list.size()) {
                // Ran off the end mid-code: input is corrupt.
                System.out.println("Error: illegal code!");
                return null;
            }
            b = list.get(pos);
        }
        code.add(b);
        pos++;
        decoded.add(decompressBytes(code));
    }
    return decoded;
}
// Encode a non-negative int as a v-byte code: big-endian 7-bit groups, with
// the high bit set on the final byte only, marking the end of the code.
static public ArrayList<Byte> compressInt(int num) {
    // Number of 7-bit groups required (thresholds: 2^7, 2^14, 2^21, 2^28).
    int groups;
    if (num >= 0 && num < 128) {
        groups = 1;
    } else if (num < 16384) {
        groups = 2;
    } else if (num < 2097152) {
        groups = 3;
    } else if (num < 268435456) {
        groups = 4;
    } else {
        groups = 5;
    }
    ArrayList<Byte> out = new ArrayList<Byte>(groups);
    // High-order groups first, without the terminator bit.
    for (int shift = 7 * (groups - 1); shift > 0; shift -= 7) {
        out.add((byte) ((num >> shift) & 0x7f));
    }
    // The lowest group carries the terminator (high) bit.
    out.add((byte) ((num & 0x7f) | 0x80));
    return out;
}
// Unused placeholder entry point; the indexer is driven from elsewhere.
public static void main(String args[]) {
}
@Override
public Map<String, Integer> documentTermFrequencyMap(int docid) {
// Delegates to the term-frequency model: maps each term in document
// {@code docid} to its occurrence count. NOTE(review): relies on field
// `tfm` declared elsewhere in this class — semantics assumed from the name.
return tfm.gettermFrequencyForDoc(docid);
}
} |
package edu.washington.escience.myria.column;
import java.io.Serializable;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.BitSet;
import org.joda.time.DateTime;
import com.google.protobuf.ByteString;
import edu.washington.escience.myria.Type;
import edu.washington.escience.myria.proto.DataProto.BooleanColumnMessage;
import edu.washington.escience.myria.proto.DataProto.ColumnMessage;
import edu.washington.escience.myria.proto.DataProto.DateTimeColumnMessage;
import edu.washington.escience.myria.proto.DataProto.DoubleColumnMessage;
import edu.washington.escience.myria.proto.DataProto.FloatColumnMessage;
import edu.washington.escience.myria.proto.DataProto.IntColumnMessage;
import edu.washington.escience.myria.proto.DataProto.LongColumnMessage;
import edu.washington.escience.myria.proto.DataProto.StringColumnMessage;
import edu.washington.escience.myria.storage.ReadableColumn;
import edu.washington.escience.myria.util.ImmutableIntArray;
/**
* A column of a batch of tuples.
*
* @param <T> type of the objects in this column.
*
*
*/
public abstract class Column<T extends Comparable<?>> implements ReadableColumn, Serializable {
/** Required for Java serialization. */
private static final long serialVersionUID = 1L;
@Override
public boolean getBoolean(final int row) {
throw new UnsupportedOperationException(getClass().getName());
}
@Override
public DateTime getDateTime(final int row) {
throw new UnsupportedOperationException(getClass().getName());
}
@Override
public double getDouble(final int row) {
throw new UnsupportedOperationException(getClass().getName());
}
@Override
public float getFloat(final int row) {
throw new UnsupportedOperationException(getClass().getName());
}
@Override
public int getInt(final int row) {
throw new UnsupportedOperationException(getClass().getName());
}
@Override
public long getLong(final int row) {
throw new UnsupportedOperationException(getClass().getName());
}
@Override
public abstract T getObject(int row);
@Override
public String getString(final int row) {
throw new UnsupportedOperationException(getClass().getName());
}
@Override
public abstract Type getType();
/**
* Serializes this column as a protobuf message into the specified output stream.
*
* @return a ColumnMessage containing a serialized copy of this column.
*/
public ColumnMessage serializeToProto() {
return Column.defaultProto(this);
}
/**
* Serializes this column as a protobuf message into the specified output stream.
*
* @param validIndices the rows of the column to serialize.
* @return a ColumnMessage containing a serialized copy of this column.
*/
public ColumnMessage serializeToProto(final ImmutableIntArray validIndices) {
return Column.defaultProto(this, validIndices);
}
@Override
public abstract int size();
/**
* Creates a new Column containing the contents of this column including only the specified rows.
*
* @param filter a BitSet indicating which rows should be kept.
* @return a new Column containing the contents of this column including only the specified rows.
*/
public Column<T> filter(final BitSet filter) {
return new FilteredColumn<T>(this, filter);
}
/**
* @param type the type of the column to be returned.
* @return a new empty column of the specified type.
*/
public static Column<?> emptyColumn(final Type type) {
switch (type) {
case BOOLEAN_TYPE:
return new BooleanColumn(new BitSet(0), 0);
case DATETIME_TYPE:
return new DateTimeColumn(new DateTime[] {}, 0);
case DOUBLE_TYPE:
return new DoubleColumn(new double[] {}, 0);
case FLOAT_TYPE:
return new FloatColumn(new float[] {}, 0);
case INT_TYPE:
return new IntArrayColumn(new int[] {}, 0);
case LONG_TYPE:
return new LongColumn(new long[] {}, 0);
case STRING_TYPE:
return new StringArrayColumn(new String[] {}, 0);
}
throw new UnsupportedOperationException("Allocating an empty column of type " + type);
}
/**
* A default implementation to serialize any Boolean column to a proto. Full copy.
*
* @param column the column to be serialized.
* @return a ColumnMessage with a BooleanColumn member.
*/
protected static final ColumnMessage defaultBooleanProto(final Column<?> column) {
ByteString.Output bytes = ByteString.newOutput((column.size() + 7) / 8);
int bitCnt = 0;
int b = 0;
for (int i = 0; i < column.size(); ++i) {
if (column.getBoolean(i)) {
b |= (1 << bitCnt);
}
bitCnt++;
if (bitCnt == 8) {
bytes.write(b);
bitCnt = 0;
b = 0;
}
}
/* Note that we do *not* build the inner class. We pass its builder instead. */
final BooleanColumnMessage.Builder inner =
BooleanColumnMessage.newBuilder().setData(bytes.toByteString());
return ColumnMessage.newBuilder()
.setType(ColumnMessage.Type.BOOLEAN)
.setBooleanColumn(inner)
.build();
}
/**
* A default implementation to serialize any DateTime column to a proto. Full copy.
*
* @param column the column to be serialized.
* @return a ColumnMessage with a DateColumn member.
*/
protected static ColumnMessage defaultDateTimeProto(final Column<?> column) {
ByteBuffer dataBytes = ByteBuffer.allocate(column.size() * Long.SIZE / Byte.SIZE);
for (int i = 0; i < column.size(); i++) {
dataBytes.putLong(column.getDateTime(i).getMillis());
}
dataBytes.flip();
final DateTimeColumnMessage.Builder inner =
DateTimeColumnMessage.newBuilder().setData(ByteString.copyFrom(dataBytes));
return ColumnMessage.newBuilder()
.setType(ColumnMessage.Type.DATETIME)
.setDateColumn(inner)
.build();
}
/**
* A default implementation to serialize any Double column to a proto. Full copy.
*
* @param column the column to be serialized.
* @return a ColumnMessage with a DoubleColumn member.
*/
protected static ColumnMessage defaultDoubleProto(final Column<?> column) {
ByteBuffer dataBytes = ByteBuffer.allocate(column.size() * Double.SIZE / Byte.SIZE);
for (int i = 0; i < column.size(); i++) {
dataBytes.putDouble(column.getDouble(i));
}
dataBytes.flip();
final DoubleColumnMessage.Builder inner =
DoubleColumnMessage.newBuilder().setData(ByteString.copyFrom(dataBytes));
return ColumnMessage.newBuilder()
.setType(ColumnMessage.Type.DOUBLE)
.setDoubleColumn(inner)
.build();
}
/**
* A default implementation to serialize any Float column to a proto. Full copy.
*
* @param column the column to be serialized.
* @return a ColumnMessage with a FloatColumn member.
*/
protected static ColumnMessage defaultFloatProto(final Column<?> column) {
ByteBuffer dataBytes = ByteBuffer.allocate(column.size() * Float.SIZE / Byte.SIZE);
for (int i = 0; i < column.size(); i++) {
dataBytes.putFloat(column.getFloat(i));
}
dataBytes.flip();
final FloatColumnMessage.Builder inner =
FloatColumnMessage.newBuilder().setData(ByteString.copyFrom(dataBytes));
return ColumnMessage.newBuilder()
.setType(ColumnMessage.Type.FLOAT)
.setFloatColumn(inner)
.build();
}
/**
* A default implementation to serialize any Integer column to a proto. Full copy.
*
* @param column the column to be serialized.
* @return a ColumnMessage with an IntColumn member.
*/
protected static ColumnMessage defaultIntProto(final Column<?> column) {
ByteBuffer dataBytes = ByteBuffer.allocate(column.size() * Integer.SIZE / Byte.SIZE);
for (int i = 0; i < column.size(); i++) {
dataBytes.putInt(column.getInt(i));
}
dataBytes.flip();
final IntColumnMessage.Builder inner =
IntColumnMessage.newBuilder().setData(ByteString.copyFrom(dataBytes));
return ColumnMessage.newBuilder().setType(ColumnMessage.Type.INT).setIntColumn(inner).build();
}
/**
* A default implementation to serialize any Long column to a proto. Full copy.
*
* @param column the column to be serialized.
* @return a ColumnMessage with a LongColumn member.
*/
protected static ColumnMessage defaultLongProto(final Column<?> column) {
ByteBuffer dataBytes = ByteBuffer.allocate(column.size() * Long.SIZE / Byte.SIZE);
for (int i = 0; i < column.size(); i++) {
dataBytes.putLong(column.getLong(i));
}
dataBytes.flip();
final LongColumnMessage.Builder inner =
LongColumnMessage.newBuilder().setData(ByteString.copyFrom(dataBytes));
return ColumnMessage.newBuilder().setType(ColumnMessage.Type.LONG).setLongColumn(inner).build();
}
/**
* A default implementation to serialize any column to a proto. Full copy.
*
* @param column the column to be serialized.
* @return a ColumnMessage with an appropriate member.
*/
protected static ColumnMessage defaultProto(final Column<?> column) {
switch (column.getType()) {
case BOOLEAN_TYPE:
return defaultBooleanProto(column);
case DATETIME_TYPE:
return defaultDateTimeProto(column);
case DOUBLE_TYPE:
return defaultDoubleProto(column);
case FLOAT_TYPE:
return defaultFloatProto(column);
case INT_TYPE:
return defaultIntProto(column);
case LONG_TYPE:
return defaultLongProto(column);
case STRING_TYPE:
return defaultStringProto(column);
}
throw new UnsupportedOperationException("Serializing a column of type " + column.getType());
}
/**
* A default implementation to serialize any filtered column to a proto. Full copy.
*
* @param column the column to be serialized.
* @param validIndices the valid indices in the column.
* @return a ColumnMessage with an appropriate member.
*/
protected static ColumnMessage defaultProto(
final Column<?> column, final ImmutableIntArray validIndices) {
BitSet filter = new BitSet(column.size());
for (int i = 0; i < column.size(); ++i) {
filter.set(validIndices.get(i));
}
return defaultProto(new FilteredColumn<>(column, filter));
}
/**
* A default implementation to serialize any String column to a proto. Full copy.
*
* @param column the column to be serialized.
* @return a ColumnMessage with a StringColumn member.
*/
protected static ColumnMessage defaultStringProto(final Column<?> column) {
final StringColumnMessage.Builder inner = StringColumnMessage.newBuilder();
StringBuilder sb = new StringBuilder();
int startP = 0, endP = 0;
for (int i = 0; i < column.size(); i++) {
int len = column.getString(i).getBytes(StandardCharsets.UTF_8).length;
endP = startP + len;
inner.addStartIndices(startP);
inner.addEndIndices(endP);
sb.append(column.getString(i));
startP = endP;
}
inner.setData(ByteString.copyFromUtf8(sb.toString()));
return ColumnMessage.newBuilder()
.setType(ColumnMessage.Type.STRING)
.setStringColumn(inner)
.build();
}
} |
package com.haxademic.sketch.render;
import java.awt.image.BufferedImage;
import processing.core.PConstants;
import processing.core.PImage;
import processing.opengl.PShader;
import processing.video.Movie;
import com.haxademic.core.app.P;
import com.haxademic.core.app.PAppletHax;
import com.haxademic.core.draw.util.DrawUtil;
import com.haxademic.core.hardware.webcam.WebCamWrapper;
import com.haxademic.core.image.ImageUtil;
import com.haxademic.core.image.filters.BlobOuterMeshFilter;
import com.haxademic.core.image.filters.Cluster8BitRow;
import com.haxademic.core.image.filters.ImageHistogramFilter;
import com.haxademic.core.image.filters.PixelFilter;
import com.haxademic.core.image.filters.PixelTriFilter;
import com.haxademic.core.image.filters.ReflectionFilter;
import com.haxademic.core.render.VideoFrameGrabber;
import com.haxademic.core.system.FileUtil;
import com.jhlabs.image.ContrastFilter;
import com.jhlabs.image.HSBAdjustFilter;
/**
 * Processing sketch that pulls frames from one of three sources (webcam,
 * video file, or a still image) and runs them through a chain of image
 * filters plus jhlabs post-processing.
 *
 * NOTE(review): lifecycle methods (setup/drawApp) are invoked by the
 * PAppletHax framework; statement order inside drawApp is significant
 * (GL state, translate, filter order).
 */
@SuppressWarnings("serial")
public class MultiInputImageFilters
extends PAppletHax
{
// Which source feeds _curFrame; one of the WEBCAM/VIDEO/IMAGE constants below.
protected int inputType;
protected final int WEBCAM = 0;
protected final int VIDEO = 1;
protected final int IMAGE = 2;
// Still image used when inputType == IMAGE.
protected PImage _loadedImg;
// The frame currently being displayed/filtered (replaced every drawApp call).
protected PImage _curFrame;
// Frame source used when inputType == VIDEO.
protected VideoFrameGrabber _frameGrabber;
// Filter instances, allocated once in initRender() and reused per frame.
protected BlobOuterMeshFilter _blobFilter;
protected ReflectionFilter _reflectionFilter;
protected PixelTriFilter _pixelTriFilter;
protected PixelFilter _pixelFilter;
protected Cluster8BitRow _clusterRowFilter;
protected ImageHistogramFilter _histogramFilter;
// Shader is never loaded (see commented-out line in initRender); stays null.
protected PShader blur;
// Framework entry point: defer all sketch-specific setup to initRender().
public void setup() {
super.setup();
initRender();
}
// Overrides app config before startup: windowed 1280x720 at 30fps, no render-to-disk.
protected void overridePropsFile() {
_appConfig.setProperty( "rendering", "false" );
_appConfig.setProperty( "fps", "30" );
_appConfig.setProperty( "width", "1280" );
_appConfig.setProperty( "height", "720" );
}
// Chooses the input source (hard-coded to IMAGE here) and allocates the filter chain.
public void initRender() {
inputType = IMAGE;
int w = 680;
int h = 680;
// blur = loadShader( FileUtil.getHaxademicDataPath()+"shaders/blur.glsl" );
switch( inputType ) {
case WEBCAM :
WebCamWrapper.initWebCam( p, w, h );
break;
case VIDEO :
// NOTE(review): absolute, machine-specific path — only works on the author's box.
_frameGrabber = new VideoFrameGrabber( p, "/Users/cacheflowe/Documents/workspace/haxademic/assets/media/video/Janet Jackson - Control - trimmed.mov", 30, 100 );
break;
case IMAGE :
_loadedImg = p.loadImage(FileUtil.getHaxademicDataPath()+"images/bobby-broadway.jpg");
break;
}
_blobFilter = new BlobOuterMeshFilter( w, h );
_reflectionFilter = new ReflectionFilter( w, h );
_pixelFilter = new PixelFilter( w, h, 2 );
_clusterRowFilter = new Cluster8BitRow( w, h, 8, false );
_histogramFilter = new ImageHistogramFilter( w, h, 6 );
_pixelTriFilter = new PixelTriFilter( w, h, 6 );
}
// Per-frame draw: grab the current source frame, filter it, and blit it.
public void drawApp() {
// filter(blur);
p.background(0);
p.fill( 255 );
p.noStroke();
p.rectMode( PConstants.CENTER );
DrawUtil.setBasicLights( p );
// draw current frame and image filter
DrawUtil.setColorForPImage(this);
DrawUtil.setPImageAlpha(this, 1.0f);
p.translate(0, 0, -400);
// capture source image
switch( inputType ) {
case WEBCAM :
_curFrame = WebCamWrapper.getImage();
_curFrame = ImageUtil.getReversePImageFast( _curFrame ); // mirror mode
break;
case VIDEO :
_frameGrabber.setFrameIndex( p.frameCount );
_curFrame = _frameGrabber.frameImageCopy();
break;
case IMAGE :
_curFrame = _loadedImg;
break;
}
// draw source and processed/filtered images
// applyPostFilters();
applyImageFilters();
applyPostFilters();
p.image( _curFrame, 0, 0, _curFrame.width, _curFrame.height );
}
// Runs the active PImage-based filter chain (alternates kept as commented-out experiments).
protected void applyImageFilters() {
// _curFrame = _histogramFilter.updateWithPImage( _curFrame );
// if( frameCount % 2 == 1 ) _curFrame = _clusterRowFilter.updateWithPImage( _curFrame );
// _curFrame = _pixelTriFilter.updateWithPImage( _curFrame ); // _clusterRowFilter.updateWithPImage(
// _curFrame = _pixelTriFilter.updateWithPImage( _histogramFilter.updateWithPImage( _curFrame ) );
// _curFrame = _blobFilter.updateWithPImage( _curFrame ); // _pixelFilter.updateWithPImage(
_curFrame = _pixelTriFilter.updateWithPImage( _histogramFilter.updateWithPImage( _reflectionFilter.updateWithPImage( _curFrame ) ) );
// _curFrame = _blobFilter.updateWithPImage( _pixelFilter.updateWithPImage( _curFrame ) );
// _curFrame = _pixelFilter.updateWithPImage( _curFrame );
}
// Round-trips _curFrame through java.awt BufferedImage to apply jhlabs filters
// (contrast -> HSB shift -> contrast again; other filters left as experiments).
protected void applyPostFilters() {
// create native java image
BufferedImage buff = ImageUtil.pImageToBuffered( _curFrame );
// contrast
ContrastFilter filt = new ContrastFilter();
filt.setBrightness(1.2f);
filt.setContrast(1.5f);
filt.filter(buff, buff);
// hsb adjust: hue oscillates slowly with frame count
HSBAdjustFilter hsb = new HSBAdjustFilter();
hsb.setHFactor(P.sin(p.frameCount/400f));
hsb.setSFactor(0.2f);
hsb.setBFactor(0.2f);
hsb.filter(buff, buff);
// glow
// GlowFilter glow = new GlowFilter();
// glow.setRadius(20f);
// glow.filter(buff, buff);
// bump
// BumpFilter bump = new BumpFilter();
// bump.filter(buff, buff);
// edge
// EdgeFilter edge = new EdgeFilter();
// edge.filter(buff, buff);
// motion blur
// MotionBlurFilter blur = new MotionBlurFilter();
// blur.setAngle(P.TWO_PI/16f);
// blur.setDistance(30f);
// blur.filter(buff, buff);
// ray
// RaysFilter ray = new RaysFilter();
// ray.setAngle(P.TWO_PI/8f);
// ray.setDistance(60f);
// ray.filter(buff, buff);
// kaleidoscope
// KaleidoscopeFilter kaleida = new KaleidoscopeFilter();
// kaleida.setSides(8);
// kaleida.filter(buff, buff);
// contrast again
filt.filter(buff, buff);
// save processed image back to _curFrame
_curFrame = ImageUtil.bufferedToPImage( buff );
}
// Unused helper: draws the raw source frame slightly behind the origin.
protected void drawSourceFrame() {
p.pushMatrix();
p.translate(0,0,-5);
p.image(_curFrame,0,0,_curFrame.width,_curFrame.height);
p.popMatrix();
}
// Called every time a new frame is available to read
public void movieEvent(Movie m) {
m.read();
}
}
package com.legit2.Demigods.Listeners;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import org.bukkit.ChatColor;
import org.bukkit.Location;
import org.bukkit.Material;
import org.bukkit.OfflinePlayer;
import org.bukkit.block.Block;
import org.bukkit.entity.EntityType;
import org.bukkit.entity.Player;
import org.bukkit.event.EventHandler;
import org.bukkit.event.EventPriority;
import org.bukkit.event.Listener;
import org.bukkit.event.block.Action;
import org.bukkit.event.block.BlockBreakEvent;
import org.bukkit.event.block.BlockBurnEvent;
import org.bukkit.event.block.BlockDamageEvent;
import org.bukkit.event.block.BlockIgniteEvent;
import org.bukkit.event.block.BlockPistonExtendEvent;
import org.bukkit.event.block.BlockPistonRetractEvent;
import org.bukkit.event.entity.EntityExplodeEvent;
import org.bukkit.event.entity.EntityInteractEvent;
import org.bukkit.event.inventory.InventoryCloseEvent;
import org.bukkit.event.player.PlayerInteractEvent;
import org.bukkit.event.player.PlayerMoveEvent;
import org.bukkit.inventory.Inventory;
import org.bukkit.inventory.ItemStack;
import com.legit2.Demigods.DConfig;
import com.legit2.Demigods.DDivineBlocks;
import com.legit2.Demigods.Demigods;
import com.legit2.Demigods.DTributeValue;
import com.legit2.Demigods.Utilities.DCharUtil;
import com.legit2.Demigods.Utilities.DDataUtil;
import com.legit2.Demigods.Utilities.DPlayerUtil;
import com.legit2.Demigods.Utilities.DMiscUtil;
/**
 * Bukkit listener that protects "divine blocks" (shrine structures) from
 * destruction/movement and handles shrine creation and tribute interactions
 * for immortal characters.
 *
 * NOTE(review): most handlers wrap their bodies in broad try/catch blocks
 * with empty catches — presumably deliberate best-effort, but errors are
 * silently swallowed.
 */
public class DDivineBlockListener implements Listener
{
static Demigods plugin;
// Global favor multiplier, read from config once at class-load time.
public static double FAVORMULTIPLIER = DConfig.getSettingDouble("global_favor_multiplier");
// Radius (in blocks) used for shrine enter/leave alerts in divineBlockAlerts().
public static int RADIUS = 8;
public DDivineBlockListener(Demigods instance)
{
plugin = instance;
}
// Handles clicks on shrines: right-click with a book creates a shrine; any
// click on an existing shrine opens the tribute inventory for allied players.
@EventHandler (priority = EventPriority.HIGH)
public void shrineInteract(PlayerInteractEvent event)
{
// Ignore mortals and anything that is not a left/right block click.
if(!DCharUtil.isImmortal(event.getPlayer())) return;
if(event.getAction() != Action.LEFT_CLICK_BLOCK && event.getAction() != Action.RIGHT_CLICK_BLOCK) return;
// Define variables
Location location = event.getClickedBlock().getLocation();
Player player = event.getPlayer();
int charID = DPlayerUtil.getCurrentChar(player);
String charAlliance = DCharUtil.getAlliance(charID);
String charDeity = DCharUtil.getDeity(charID);
// NOTE(review): creates a shrine on ANY right-click with a book in hand,
// regardless of the clicked block's material — confirm whether a material
// check (e.g. gold block) is missing here.
if(event.getAction() == Action.RIGHT_CLICK_BLOCK && event.getPlayer().getItemInHand().getType() == Material.BOOK)
{
try
{
// Shrine created!
ArrayList<Location> locations = new ArrayList<Location>(); locations.add(location);
DDivineBlocks.createShrine(charID, locations);
location.getWorld().getBlockAt(location).setType(Material.BEDROCK);
location.getWorld().spawnEntity(location.add(0.5, 0.0, 0.5), EntityType.ENDER_CRYSTAL);
location.getWorld().strikeLightningEffect(location);
player.sendMessage(ChatColor.GRAY + "The " + ChatColor.YELLOW + charAlliance + "s" + ChatColor.GRAY + " are pleased...");
player.sendMessage(ChatColor.GRAY + "A shrine has been created in honor of " + ChatColor.YELLOW + charDeity + ChatColor.GRAY + "!");
}
catch(Exception e)
{
// Creation of shrine failed...
e.printStackTrace();
}
}
try
{
// Check if block is divine
String shrineDeity = DDivineBlocks.getDeityAtShrine(location);
if(shrineDeity == null) return;
// Check if character has deity
if(DCharUtil.hasDeity(charID, shrineDeity))
{
// Open the tribute inventory
Inventory ii = DMiscUtil.getPlugin().getServer().createInventory(player, 27, "Tributes");
player.openInventory(ii);
DDataUtil.saveCharData(charID, "temp_tributing", DDivineBlocks.getOwnerOfShrine(event.getClickedBlock().getLocation()));
event.setCancelled(true);
return;
}
player.sendMessage(ChatColor.YELLOW + "You must be allied to " + shrineDeity + " in order to tribute here.");
}
// NOTE(review): empty catch — any failure here is silently ignored.
catch(Exception e) {}
}
// When a "Tributes" inventory is closed, converts its contents to tribute
// value, grants devotion, and empties the chest.
@EventHandler (priority = EventPriority.MONITOR)
public void playerTribute(InventoryCloseEvent event)
{
try
{
if(!(event.getPlayer() instanceof Player)) return;
Player player = (Player)event.getPlayer();
int charID = DPlayerUtil.getCurrentChar(player);
if(!DCharUtil.isImmortal(player)) return;
// If it isn't a tribute chest then break the method
if(!event.getInventory().getName().equals("Tributes")) return;
// Get the creator of the shrine
//int shrineCreator = DDivineBlocks.getOwnerOfShrine((Location) DDataUtil.getCharData(charID, "tributing_temp"));
DDataUtil.removeCharData(charID, "temp_tributing");
// Calculate the total value of the chest contents.
// NOTE(review): "tirbuteValue" is a typo for "tributeValue" (local-only, harmless).
int tirbuteValue = 0;
int items = 0;
for(ItemStack ii : event.getInventory().getContents())
{
if(ii != null)
{
tirbuteValue += DTributeValue.getTributeValue(ii);
items ++;
}
}
// NOTE(review): int *= double compound assignment truncates the product.
tirbuteValue *= FAVORMULTIPLIER;
// Give devotion
int devotionBefore = DCharUtil.getDevotion(charID);
// NOTE(review): devotion is granted twice (full value, then value/7) —
// confirm whether one of these calls is leftover and should be removed.
DCharUtil.giveDevotion(charID, tirbuteValue);
DCharUtil.giveDevotion(charID, tirbuteValue / 7);
// Give favor
int favorBefore = DCharUtil.getMaxFavor(charID);
//DUtil.setFavorCap(player, DUtil.getFavorCap(username)+value/5); TODO
// Devotion lock TODO
String charName = DCharUtil.getName(charID);
if(devotionBefore < DCharUtil.getDevotion(charID)) player.sendMessage(ChatColor.YELLOW + "Your devotion to " + charName + " has increased to " + DCharUtil.getDevotion(charID) + ".");
if(favorBefore < DCharUtil.getMaxFavor(charID)) player.sendMessage(ChatColor.YELLOW + "Your favor cap has increased to " + DCharUtil.getMaxFavor(charID) + ".");
if((favorBefore == DCharUtil.getMaxFavor(charID)) && (devotionBefore == DCharUtil.getDevotion(charID)) && (items > 0)) player.sendMessage(ChatColor.YELLOW + "Your tributes were insufficient for " + charName + "'s blessings.");
// Clear the tribute case
event.getInventory().clear();
}
catch(Exception e) {}
}
// Cancels entity interactions with the ender crystal decorating a divine block.
// NOTE(review): static event handler — confirm the plugin's registration path
// actually dispatches static @EventHandler methods.
@EventHandler(priority = EventPriority.HIGHEST)
public static void stopDestroyEnderCrystal(EntityInteractEvent event)
{
try
{
for(Location divineBlock : DDivineBlocks.getAllDivineBlocks())
{
// The crystal was spawned at the block location offset by (0.5, 0, 0.5).
if(event.getEntity().getLocation().subtract(0.5, 0, 0.5).equals(divineBlock))
{
event.setCancelled(true);
return;
}
}
}
catch(Exception e)
{
e.printStackTrace();
}
}
// Prevents players from breaking divine blocks by hand.
@EventHandler(priority = EventPriority.HIGHEST)
public static void stopDestroyDivineBlock(BlockBreakEvent event)
{
try
{
for(Location divineBlock : DDivineBlocks.getAllDivineBlocks())
{
if(event.getBlock().getLocation().equals(divineBlock))
{
event.getPlayer().sendMessage(ChatColor.YELLOW + "Divine blocks cannot be broken by hand.");
event.setCancelled(true);
return;
}
}
}
catch(Exception e) {}
}
// Prevents block-damage events on divine blocks.
@EventHandler(priority = EventPriority.HIGHEST)
public void stopDivineBlockDamage(BlockDamageEvent event)
{
try
{
for(Location divineBlock : DDivineBlocks.getAllDivineBlocks())
{
if(event.getBlock().getLocation().equals(divineBlock))
{
event.setCancelled(true);
}
}
}
catch(Exception e) {}
}
// Prevents divine blocks from being set on fire.
@EventHandler(priority = EventPriority.HIGHEST)
public void stopDivineBlockIgnite(BlockIgniteEvent event)
{
try
{
for(Location divineBlock : DDivineBlocks.getAllDivineBlocks())
{
if(event.getBlock().getLocation().equals(divineBlock))
{
event.setCancelled(true);
}
}
}
catch(Exception e) {}
}
// Prevents divine blocks from burning away.
@EventHandler(priority = EventPriority.HIGHEST)
public void stopDivineBlockBurn(BlockBurnEvent event)
{
try
{
for(Location divineBlock : DDivineBlocks.getAllDivineBlocks())
{
if(event.getBlock().getLocation().equals(divineBlock))
{
event.setCancelled(true);
}
}
}
catch(Exception e) {}
}
// Cancels piston pushes that would move any divine block.
@EventHandler(priority = EventPriority.HIGHEST)
public void stopDivineBlockPistonExtend(BlockPistonExtendEvent event)
{
List<Block> blocks = event.getBlocks();
CHECKBLOCKS:
for(Block block : blocks)
{
try
{
for(Location divineBlock : DDivineBlocks.getAllDivineBlocks())
{
if(block.getLocation().equals(divineBlock))
{
event.setCancelled(true);
// Labeled break: one match is enough to cancel the whole event.
break CHECKBLOCKS;
}
}
}
catch(Exception e)
{
e.printStackTrace();
}
}
}
// Cancels sticky-piston retractions that would pull a divine block.
@EventHandler(priority = EventPriority.HIGHEST)
public void stopDivineBlockPistonRetract(BlockPistonRetractEvent event)
{
// Define variables
// The block two steps in the piston's direction is the one a sticky piston pulls.
final Block block = event.getBlock().getRelative(event.getDirection(), 2);
try
{
for(Location divineBlock : DDivineBlocks.getAllDivineBlocks())
{
if(block.getLocation().equals((divineBlock)) && event.isSticky())
{
event.setCancelled(true);
}
}
}
catch(Exception e) {}
}
// Removes divine blocks (and blocks in no-PVP zones) from explosion damage lists.
@EventHandler(priority = EventPriority.HIGHEST)
public void divineBlockExplode(final EntityExplodeEvent event)
{
try
{
// Remove divineBlock blocks from explosions
Iterator<Block> i = event.blockList().iterator();
while(i.hasNext())
{
Block block = i.next();
if(!DMiscUtil.canLocationPVP(block.getLocation())) i.remove();
// NOTE(review): if the no-PVP branch above already removed this block,
// a second i.remove() here would throw IllegalStateException — swallowed
// by the outer catch. Confirm intended.
for(Location divineBlock : DDivineBlocks.getAllDivineBlocks())
{
if(block.getLocation().equals(divineBlock)) i.remove();
}
}
}
catch (Exception er) {}
}
// Sends enter/leave messages when a player crosses a shrine's RADIUS boundary.
@EventHandler(priority = EventPriority.HIGH)
public void divineBlockAlerts(PlayerMoveEvent event)
{
// Skip micro-movements (head turns, tiny steps).
if(event.getFrom().distance(event.getTo()) < 0.1) return;
for(int charID : DMiscUtil.getImmortalList())
{
try
{
if(DDivineBlocks.getShrines(charID) != null)
{
// Define variables
OfflinePlayer charOwner = DCharUtil.getOwner(charID);
for(Location divineBlock : DDivineBlocks.getShrines(charID))
{
// Check for world errors
// NOTE(review): these `return`s abort the whole handler on the first
// shrine in a different world — `continue` was probably intended.
if(!divineBlock.getWorld().equals(event.getPlayer().getWorld())) return;
if(event.getFrom().getWorld() != divineBlock.getWorld()) return;
/*
* Outside coming in
*/
if(event.getFrom().distance(divineBlock) > RADIUS)
{
if(divineBlock.distance(event.getTo()) <= RADIUS)
{
event.getPlayer().sendMessage(ChatColor.GRAY + "You have entered " + charOwner.getName() + "'s Shrine to " + ChatColor.YELLOW + DDivineBlocks.getDeityAtShrine(divineBlock) + ChatColor.GRAY + ".");
return;
}
}
/*
* Leaving
*/
else if(event.getFrom().distance(divineBlock) <= RADIUS)
{
if(divineBlock.distance(event.getTo()) > RADIUS)
{
event.getPlayer().sendMessage(ChatColor.GRAY + "You have left a holy area.");
return;
}
}
}
}
} catch(Exception e){}
}
}
}
package com.nilhcem.hostseditor.list;
import java.util.List;
import javax.inject.Inject;
import android.text.TextUtils;
import android.util.Log;
import android.widget.Filter;
import com.nilhcem.hostseditor.bus.event.RefreshHostsEvent;
import com.nilhcem.hostseditor.core.Host;
import com.nilhcem.hostseditor.core.HostsManager;
import com.squareup.otto.Bus;
public class ListHostsSearchFilter extends Filter {
private static final String TAG = "ListHostsSearchFilter";
@Inject Bus mBus;
@Inject HostsManager mHostsManager;
@SuppressWarnings("unchecked")
@Override
protected void publishResults(CharSequence constraint, FilterResults results) {
if (!TextUtils.isEmpty(constraint)) {
Log.d(TAG, "Publishing result for: " + constraint);
}
mBus.post(new RefreshHostsEvent((List<Host>) results.values));
}
@Override
protected FilterResults performFiltering(CharSequence constraint) {
if (!TextUtils.isEmpty(constraint)) {
Log.d(TAG, "Perform filtering for: " + constraint);
}
FilterResults results = new FilterResults();
results.values = mHostsManager.filterHosts(constraint);
return results;
}
} |
package com.opengamma.engine.view;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorCompletionService;
import java.util.concurrent.Future;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.opengamma.OpenGammaRuntimeException;
import com.opengamma.engine.ComputationTarget;
import com.opengamma.engine.ComputationTargetResolver;
import com.opengamma.engine.ComputationTargetResolverAdapter;
import com.opengamma.engine.ComputationTargetSpecification;
import com.opengamma.engine.ComputationTargetType;
import com.opengamma.engine.depgraph.DependencyGraph;
import com.opengamma.engine.depgraph.DependencyGraphBuilder;
import com.opengamma.engine.depgraph.DependencyNode;
import com.opengamma.engine.depgraph.DependencyNodeFormatter;
import com.opengamma.engine.function.FunctionCompilationContext;
import com.opengamma.engine.function.FunctionResolver;
import com.opengamma.engine.livedata.LiveDataAvailabilityProvider;
import com.opengamma.engine.position.AbstractPortfolioNodeTraversalCallback;
import com.opengamma.engine.position.Portfolio;
import com.opengamma.engine.position.PortfolioImpl;
import com.opengamma.engine.position.PortfolioNode;
import com.opengamma.engine.position.PortfolioNodeImpl;
import com.opengamma.engine.position.PortfolioNodeTraverser;
import com.opengamma.engine.position.Position;
import com.opengamma.engine.position.PositionImpl;
import com.opengamma.engine.security.Security;
import com.opengamma.engine.security.SecurityMaster;
import com.opengamma.engine.value.ValueRequirement;
import com.opengamma.engine.value.ValueSpecification;
import com.opengamma.id.IdentifierBundle;
import com.opengamma.id.UniqueIdentifier;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.monitor.OperationTimer;
// REVIEW kirk 2009-09-16 -- The design goal here is that the portfolio
// evaluation model will be capable of incrementally maintaining its
// state based on certain changes (new position, position quantity changed)
// in the underlying portfolio, but for the time being, I just needed to
// move everything that's static for the portfolio out of the
// eval loop for performance.
/**
* Holds all data that is specific to a particular version of a {@link Portfolio},
* and must be re-evaluated when the portfolio changes.
*/
public class PortfolioEvaluationModel {
// Logger and debug-output switches for the graph-building phase.
private static final Logger s_logger = LoggerFactory.getLogger(PortfolioEvaluationModel.class);
private static final boolean OUTPUT_DEPENDENCY_GRAPHS = false;
private static final boolean OUTPUT_LIVE_DATA_REQUIREMENTS = false;
// The portfolio version this model was built from; replaced wholesale by
// setPopulatedRootNode() once positions/securities have been resolved.
private Portfolio _portfolio;
// REVIEW kirk 2010-03-29 -- Use a sorted map here?
// One dependency graph per calculation configuration name.
private final Map<String, DependencyGraph> _graphsByConfiguration =
new ConcurrentHashMap<String, DependencyGraph>();
// Securities resolved so far, keyed by their identifier bundle.
private final Map<IdentifierBundle, Security> _securitiesByKey = new ConcurrentHashMap<IdentifierBundle, Security>();
// REVIEW kirk 2009-09-14 -- HashSet is almost certainly the wrong set here.
private final Set<Position> _populatedPositions = new HashSet<Position>();
private final Set<Security> _securities = new HashSet<Security>();
private final Set<ValueRequirement> _liveDataRequirements = new HashSet<ValueRequirement>();
public PortfolioEvaluationModel(Portfolio portfolio) {
ArgumentChecker.notNull(portfolio, "Portfolio");
_portfolio = portfolio;
}
/**
* @return the portfolio
*/
public Portfolio getPortfolio() {
return _portfolio;
}
/**
* @param populatedRootNode the populatedRootNode to set
*/
public void setPopulatedRootNode(PortfolioNodeImpl populatedRootNode) {
_portfolio = new PortfolioImpl(_portfolio.getUniqueIdentifier(), _portfolio.getName(), populatedRootNode);
}
/**
* @return the positions
*/
public Set<Position> getPopulatedPositions() {
return _populatedPositions;
}
/**
* @return the securities
*/
public Set<Security> getSecurities() {
return _securities;
}
public Set<String> getAllCalculationConfigurationNames() {
return new TreeSet<String>(_graphsByConfiguration.keySet());
}
public Set<String> getAllOutputValueNames() {
Set<String> valueNames = new HashSet<String>();
for (DependencyGraph graph : getAllDependencyGraphs()) {
for (ValueSpecification spec : graph.getOutputValues()) {
valueNames.add(spec.getRequirementSpecification().getValueName());
}
}
return valueNames;
}
public Collection<DependencyGraph> getAllDependencyGraphs() {
return new ArrayList<DependencyGraph>(_graphsByConfiguration.values());
}
public DependencyGraph getDependencyGraph(String calcConfigName) {
return _graphsByConfiguration.get(calcConfigName);
}
private class ResolvedSecurityComputationTargetResolver extends ComputationTargetResolverAdapter {
private final Map<UniqueIdentifier, Position> _positionsByUID = new HashMap<UniqueIdentifier, Position>();
private final Map<UniqueIdentifier, Security> _securitiesByUID = new HashMap<UniqueIdentifier, Security>();
private final Map<UniqueIdentifier, PortfolioNode> _portfolioNodeByUID = new HashMap<UniqueIdentifier, PortfolioNode>();
public ResolvedSecurityComputationTargetResolver(final ComputationTargetResolver defaultResolver) {
super(defaultResolver);
for (final Position position : getPopulatedPositions()) {
_positionsByUID.put(position.getUniqueIdentifier(), position);
}
for (final Security security : getSecurities()) {
_securitiesByUID.put(security.getUniqueIdentifier(), security);
}
populatePortfolioNodeByUID(getPortfolio().getRootNode());
}
private void populatePortfolioNodeByUID(final PortfolioNode portfolioNode) {
_portfolioNodeByUID.put(portfolioNode.getUniqueIdentifier(), portfolioNode);
for (final PortfolioNode child : portfolioNode.getChildNodes()) {
populatePortfolioNodeByUID(child);
}
}
@Override
public ComputationTarget resolve(ComputationTargetSpecification specification) {
UniqueIdentifier uid = specification.getUniqueIdentifier();
switch (specification.getType()) {
case SECURITY: {
Security security = _securitiesByUID.get(uid);
s_logger.debug("Security ID {} requested, pre-resolved to {}", uid, security);
if (security == null) {
break;
}
return new ComputationTarget(ComputationTargetType.SECURITY, security);
}
case POSITION: {
Position position = _positionsByUID.get(uid);
s_logger.debug("Position ID {} requested, pre-resolved to {}", uid, position);
if (position == null) {
break;
}
return new ComputationTarget(ComputationTargetType.POSITION, position);
}
case PORTFOLIO_NODE : {
PortfolioNode portfolioNode = _portfolioNodeByUID.get(uid);
s_logger.debug("PortfolioNode ID {} requested, pre-resolved to {}", uid, portfolioNode);
if (portfolioNode == null) {
break;
}
return new ComputationTarget(ComputationTargetType.PORTFOLIO_NODE, portfolioNode);
}
}
return super.resolve(specification);
}
}
public void init(
ViewCompilationServices viewCompilationServices,
ViewDefinition viewDefinition) {
ArgumentChecker.notNull(viewCompilationServices, "View Compilation Services");
ArgumentChecker.notNull(viewDefinition, "View Definition");
// Resolve all of the securities
resolveSecurities(viewCompilationServices);
PortfolioNodeImpl populatedRootNode = getPopulatedPortfolioNode(getPortfolio().getRootNode());
assert populatedRootNode != null;
setPopulatedRootNode(populatedRootNode);
loadPositions();
loadSecurities();
buildDependencyGraphs(
viewCompilationServices.getFunctionResolver(),
viewCompilationServices.getCompilationContext(),
viewCompilationServices.getLiveDataAvailabilityProvider(),
new ResolvedSecurityComputationTargetResolver(viewCompilationServices.getComputationTargetResolver()),
viewDefinition);
if (OUTPUT_DEPENDENCY_GRAPHS) {
outputDependencyGraphs();
}
if (OUTPUT_LIVE_DATA_REQUIREMENTS) {
outputLiveDataRequirements(viewCompilationServices.getSecurityMaster());
}
refreshLiveDataRequirements();
}
private void outputDependencyGraphs() {
StringBuilder sb = new StringBuilder();
for (Map.Entry<String, DependencyGraph> entry : _graphsByConfiguration.entrySet()) {
String configName = entry.getKey();
sb.append("DepGraph for ").append(configName);
DependencyGraph depGraph = entry.getValue();
sb.append("\tProducing values ").append(depGraph.getOutputValues());
for (DependencyNode depNode : depGraph.getDependencyNodes()) {
sb.append("\t\tNode:\n").append(DependencyNodeFormatter.toString(depNode));
}
}
s_logger.warn("Dependency Graphs -- \n{}", sb);
}
private void outputLiveDataRequirements(SecurityMaster secMaster) {
StringBuilder sb = new StringBuilder();
for (Map.Entry<String, DependencyGraph> entry : _graphsByConfiguration.entrySet()) {
String configName = entry.getKey();
Collection<ValueRequirement> requiredLiveData = entry.getValue().getAllRequiredLiveData();
if (requiredLiveData.isEmpty()) {
sb.append(configName).append(" requires no live data.\n");
} else {
sb.append("Live data for ").append(configName).append("\n");
for (ValueRequirement liveRequirement : requiredLiveData) {
sb.append("\t").append(liveRequirement.getTargetSpecification().getRequiredLiveData(secMaster)).append("\n");
}
}
}
s_logger.warn("Live data requirements -- \n{}", sb);
}
protected void resolveSecurities(final ViewCompilationServices viewCompilationServices) {
// TODO kirk 2010-03-07 -- Need to switch to OperationTimer for this.
OperationTimer timer = new OperationTimer(s_logger, "Resolving all securities for {}", getPortfolio().getName());
Set<IdentifierBundle> securityKeys = getSecurityKeysForResolution(getPortfolio().getRootNode());
ExecutorCompletionService<IdentifierBundle> completionService = new ExecutorCompletionService<IdentifierBundle>(viewCompilationServices.getExecutorService());
boolean failed = false;
for (IdentifierBundle secKey : securityKeys) {
if (secKey == null) {
failed = true;
s_logger.warn("Had null security key in at least one position");
} else {
completionService.submit(new SecurityResolutionJob(viewCompilationServices.getSecurityMaster(), secKey), secKey);
}
}
for (int i = 0; i < securityKeys.size(); i++) {
Future<IdentifierBundle> future = null;
try {
future = completionService.take();
} catch (InterruptedException e1) {
Thread.interrupted();
s_logger.warn("Interrupted, so didn't finish resolution.");
failed = true;
break;
}
try {
future.get();
} catch (Exception e) {
s_logger.warn("Got exception resolving securities", e);
failed = true;
}
}
if (failed) {
throw new OpenGammaRuntimeException("Unable to resolve all securities for Portfolio " + getPortfolio().getName());
}
timer.finished();
}
/**
* A small job that can be run in an executor to resolve a security against
* a {@link SecurityMaster}.
*/
protected class SecurityResolutionJob implements Runnable {
private final SecurityMaster _securityMaster;
private final IdentifierBundle _securityKey;
public SecurityResolutionJob(
SecurityMaster securityMaster,
IdentifierBundle securityKey) {
_securityMaster = securityMaster;
_securityKey = securityKey;
}
@Override
public void run() {
Security security = null;
try {
security = _securityMaster.getSecurity(_securityKey);
} catch (Exception e) {
throw new OpenGammaRuntimeException("Exception while resolving SecurityKey " + _securityKey, e);
}
if (security == null) {
throw new OpenGammaRuntimeException("Unable to resolve security key " + _securityKey);
} else {
_securitiesByKey.put(_securityKey, security);
}
}
}
protected Set<IdentifierBundle> getSecurityKeysForResolution(PortfolioNode node) {
Set<IdentifierBundle> result = new TreeSet<IdentifierBundle>();
for (Position position : node.getPositions()) {
if (position.getSecurity() != null) {
// Nothing to do here; they pre-resolved the security.
s_logger.debug("Security pre-resolved by PositionMaster for {}", position.getUniqueIdentifier());
} else if (position.getSecurityKey() != null) {
result.add(position.getSecurityKey());
} else {
throw new IllegalArgumentException("Security or security key must be provided: " + position.getUniqueIdentifier());
}
}
for (PortfolioNode subNode : node.getChildNodes()) {
result.addAll(getSecurityKeysForResolution(subNode));
}
return result;
}
protected PortfolioNodeImpl getPopulatedPortfolioNode(
PortfolioNode node) {
if (node == null) {
return null;
}
PortfolioNodeImpl populatedNode = new PortfolioNodeImpl(node.getUniqueIdentifier(), node.getName());
for (Position position : node.getPositions()) {
Security security = position.getSecurity();
if (position.getSecurity() == null) {
security = _securitiesByKey.get(position.getSecurityKey());
}
if (security == null) {
throw new OpenGammaRuntimeException("Unable to resolve security key " + position.getSecurityKey() + " for position " + position);
}
PositionImpl populatedPosition = new PositionImpl(position.getUniqueIdentifier(), position.getQuantity(), position.getSecurityKey(), security);
populatedNode.addPosition(populatedPosition);
}
for (PortfolioNode child : node.getChildNodes()) {
populatedNode.addChildNode(getPopulatedPortfolioNode(child));
}
return populatedNode;
}
public void loadPositions() {
OperationTimer timer = new OperationTimer(s_logger, "Loading positions on {}", getPortfolio().getName());
PortfolioNode populatedRootNode = getPortfolio().getRootNode();
loadPositions(populatedRootNode);
timer.finished();
s_logger.debug("Operating on {} positions", getPopulatedPositions().size());
}
protected void loadPositions(PortfolioNode node) {
getPopulatedPositions().addAll(node.getPositions());
for (PortfolioNode child : node.getChildNodes()) {
loadPositions(child);
}
}
public void loadSecurities() {
// REVIEW kirk 2010-03-07 -- This is necessary because securities might have
// been pre-resolved, so we can't just rely on the map from SecurityKey to Security
// that we build up during resolution.
for (Position position : getPopulatedPositions()) {
getSecurities().add(position.getSecurity());
}
}
public void buildDependencyGraphs(
FunctionResolver functionResolver,
FunctionCompilationContext compilationContext,
LiveDataAvailabilityProvider liveDataAvailabilityProvider,
ComputationTargetResolver computationTargetResolver,
ViewDefinition viewDefinition) {
OperationTimer timer = new OperationTimer(s_logger, "Building dependency graphs {}", getPortfolio().getName());
// REVIEW kirk 2010-03-29 -- Much like the inner loop, the outer loop is chock-full
// of potentially expensive operations for parallelism. In fact, perhaps more so
// than the inner loop.
for (Map.Entry<String, ViewCalculationConfiguration> entry : viewDefinition.getAllCalculationConfigurationsByName().entrySet()) {
String configName = entry.getKey();
ViewCalculationConfiguration calcConfig = entry.getValue();
DependencyGraphBuilder dependencyGraphBuilder = new DependencyGraphBuilder();
dependencyGraphBuilder.setLiveDataAvailabilityProvider(liveDataAvailabilityProvider);
dependencyGraphBuilder.setTargetResolver(computationTargetResolver);
dependencyGraphBuilder.setFunctionResolver(functionResolver);
dependencyGraphBuilder.setCompilationContext(compilationContext);
dependencyGraphBuilder.setCalculationConfigurationName(configName);
PortfolioNodeCompiler compiler = new PortfolioNodeCompiler(dependencyGraphBuilder, calcConfig);
new PortfolioNodeTraverser(compiler).traverse(getPortfolio().getRootNode());
DependencyGraph depGraph = dependencyGraphBuilder.getDependencyGraph();
depGraph.removeUnnecessaryValues();
_graphsByConfiguration.put(configName, depGraph);
}
timer.finished();
}
public Set<ValueRequirement> getAllLiveDataRequirements() {
return Collections.unmodifiableSet(_liveDataRequirements);
}
public void refreshLiveDataRequirements() {
for (DependencyGraph dependencyGraph : _graphsByConfiguration.values()) {
Set<ValueRequirement> requiredLiveData = dependencyGraph.getAllRequiredLiveData();
_liveDataRequirements.addAll(requiredLiveData);
}
}
public Set<ComputationTargetSpecification> getAllComputationTargets() {
Set<ComputationTargetSpecification> targets = new HashSet<ComputationTargetSpecification>();
for (DependencyGraph dependencyGraph : _graphsByConfiguration.values()) {
Set<ComputationTargetSpecification> requiredLiveData = dependencyGraph.getAllComputationTargets();
targets.addAll(requiredLiveData);
}
return targets;
}
/**
* Gathers all security types.
*/
protected static class SubNodeSecurityTypeAccumulator extends AbstractPortfolioNodeTraversalCallback {
private final Set<String> _subNodeSecurityTypes = new TreeSet<String>();
/**
* @return the subNodeSecurityTypes
*/
public Set<String> getSubNodeSecurityTypes() {
return _subNodeSecurityTypes;
}
@Override
public void preOrderOperation(Position position) {
_subNodeSecurityTypes.add(position.getSecurity().getSecurityType());
}
}
/**
* Compiles dependency graphs for each stage in a portfolio tree.
*/
protected static class PortfolioNodeCompiler extends AbstractPortfolioNodeTraversalCallback {
private final DependencyGraphBuilder _dependencyGraphBuilder;
private final ViewCalculationConfiguration _calculationConfiguration;
public PortfolioNodeCompiler(
DependencyGraphBuilder dependencyGraphBuilder,
ViewCalculationConfiguration calculationConfiguration) {
_dependencyGraphBuilder = dependencyGraphBuilder;
_calculationConfiguration = calculationConfiguration;
}
@Override
public void preOrderOperation(PortfolioNode portfolioNode) {
// Yes, we could in theory do this outside the loop by implementing more
// callbacks, but it might have gotten hairy, so for the first pass I just
// did it this way.
Set<String> subNodeSecurityTypes = getSubNodeSecurityTypes(portfolioNode);
Map<String, Set<String>> outputsBySecurityType = _calculationConfiguration.getValueRequirementsBySecurityTypes();
for (String secType : subNodeSecurityTypes) {
Set<String> requiredOutputs = outputsBySecurityType.get(secType);
if ((requiredOutputs == null) || requiredOutputs.isEmpty()) {
continue;
}
Set<ValueRequirement> requirements = new HashSet<ValueRequirement>();
// first do the portfolio node targets (aggregated, multiple-position nodes), if they're needed
if (_calculationConfiguration.getDefinition().isComputePortfolioNodeCalculations()) {
for (String requiredOutput : requiredOutputs) {
requirements.add(new ValueRequirement(requiredOutput, portfolioNode));
}
_dependencyGraphBuilder.addTarget(new ComputationTarget(ComputationTargetType.PORTFOLIO_NODE, portfolioNode), requirements);
}
// now do the position nodes targets, if they're needed
if (_calculationConfiguration.getDefinition().isComputePositionNodeCalculations()) {
for (Position position : portfolioNode.getPositions()) {
requirements.clear();
for (String requiredOutput : requiredOutputs) {
requirements.add(new ValueRequirement(requiredOutput, position));
}
_dependencyGraphBuilder.addTarget(new ComputationTarget(ComputationTargetType.POSITION, position), requirements);
}
}
// now do the per-security targets, if they're needed
if (_calculationConfiguration.getDefinition().isComputeSecurityNodeCalculations()) {
for (Position position : portfolioNode.getPositions()) {
requirements.clear();
for (String requiredOutput : requiredOutputs) {
requirements.add(new ValueRequirement(requiredOutput, position.getSecurity()));
}
_dependencyGraphBuilder.addTarget(new ComputationTarget(ComputationTargetType.SECURITY, position.getSecurity()), requirements);
}
}
}
}
}
/**
* @param portfolioNode
* @return
*/
private static Set<String> getSubNodeSecurityTypes(PortfolioNode portfolioNode) {
SubNodeSecurityTypeAccumulator accumulator = new SubNodeSecurityTypeAccumulator();
new PortfolioNodeTraverser(accumulator).traverse(portfolioNode);
return accumulator.getSubNodeSecurityTypes();
}
} |
package com.saabre.setup.operation.analysis;
import com.saabre.setup.helper.FileHelper;
import com.saabre.setup.module.analysis.AnalysisOperation;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.PrintWriter;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
*
* @author Lifaen
*/
public class ProcessLogFile extends AnalysisOperation
{
// -- Attributes --
private Pattern instructionPattern;
private List<Section> sectionList;
private boolean headerPrinted;
private Section currentSection;
private long currentTimestamp;
private Map<String, String> store;
private StringBuilder builder;
// -- Methods --
@Override
public void loadConfig() throws Exception
{
instructionPattern = Pattern.compile("^#([a-zA-Z]+) ?(.*)");
sectionList = new LinkedList<>();
currentTimestamp = 0;
store = null;
builder = new StringBuilder();
headerPrinted = false;
}
@Override
public void run() throws Exception
{
// Load section classes --
sectionList.add(new DfRawSection());
sectionList.add(new IostatSection());
sectionList.add(new MpstatAllSection());
// Read monitor file --
File f = new File(FileHelper.getAnalyisOutputFolder() + "monitor.log");
BufferedReader br = new BufferedReader(new FileReader(f));
String line;
while ((line = br.readLine()) != null) {
processLine(line);
}
br.close();
// Print CSV file --
PrintWriter writer = new PrintWriter(FileHelper.getAnalyisOutputFolder() + "monitor.csv", "UTF-8");
writer.append(builder);
writer.close();
}
private void processLine(String line)
{
if(line.matches("#.+")) // Instruction --
{
Matcher m = instructionPattern.matcher(line);
if(!m.find())
return; // Bad instruction --
String instruction = m.group(1);
String param = m.group(2);
if(instruction.equals("TIME")) // Handle time instruction --
{
currentTimestamp = Long.parseLong(param);
printRow();
}
else // Handle section instructions --
{
currentSection = null;
for (Section section : sectionList) {
if(section.getTag().equals(instruction)) {
currentSection = section;
break;
}
}
}
}
else if(!line.isEmpty())
{
if(currentSection != null)
currentSection.onLine(line);
}
}
public void store(String key, String value)
{
store.put(key, value);
}
public void printRow()
{
if(store != null)
{
if(!headerPrinted) // Second call, Header --
{
builder.append("time");
printSet(store.keySet());
headerPrinted = true;
}
// Since second call, Row --
builder.append(currentTimestamp);
printSet(store.values());
}
else // First call, Initialisation --
{
store = new LinkedHashMap<>();
}
}
public void printSet(Collection<String> set)
{
for (String key : set)
builder.append(";").append(key);
builder.append("\n");
}
// -- Internal classes --
private abstract class Section {
public abstract String getTag();
public abstract void onLine(String line);
}
private class DfRawSection extends Section {
@Override public String getTag() { return "DfRaw"; }
@Override
public void onLine(String line) {
// Ignore Header --
if(line.matches("Filesystem.*"))
return;
line = line.replaceAll(" {1,}", " ");
String[] cols = line.split(" ");
String prefix = "diskspace.";
String mount = "." + cols[5];
store(prefix + "total" + mount, cols[1]);
store(prefix + "used" + mount, cols[2]);
store(prefix + "free" + mount, cols[3]);
}
}
private class IostatSection extends Section {
@Override public String getTag() { return "Iostat"; }
@Override
public void onLine(String line) {
// Ignore Header --
if(line.matches("Linux.*"))
return;
line = line.replaceAll(" {2,}", " ");
String[] cols = line.split(" ");
String prefix = "diskio.";
String mount = "." + cols[0];
store(prefix + "tps" + mount, cols[1]); // Transfer per second --
store(prefix + "read" + mount, cols[2]); // Block read per second --
store(prefix + "write" + mount, cols[3]); // Block written per second --
}
}
private class MpstatAllSection extends Section {
@Override public String getTag() { return "MpstatAll"; }
@Override
public void onLine(String line) {
// Ignore Header --
if(line.matches("Linux.*"))
return;
line = line.replaceAll(" {1,}", " ");
String[] cols = line.split(" ");
String prefix = "cpu.";
String mount = "." + cols[2];
store(prefix + "usr" + mount, cols[2]);
store(prefix + "nice" + mount, cols[3]);
store(prefix + "sys" + mount, cols[4]);
store(prefix + "iowait" + mount, cols[5]);
store(prefix + "irq" + mount, cols[6]);
store(prefix + "soft" + mount, cols[7]);
store(prefix + "steal" + mount, cols[8]);
store(prefix + "guest" + mount, cols[9]);
store(prefix + "gnice" + mount, cols[10]);
store(prefix + "idle" + mount, cols[11]);
}
}
} |
package soot.jimple;
import soot.*;
import soot.jimple.internal.*;
import soot.util.*;
import java.util.*;
import java.io.*;
/**
The Jimple class contains all the constructors for the components of the Jimple
grammar for the Jimple body. <br><br>
Immediate -> Local | Constant <br>
RValue -> Local | Constant | ConcreteRef | Expr<br>
Variable -> Local | ArrayRef | InstanceFieldRef | StaticFieldRef <br>
*/
public class Jimple
{
    /** Constructor used by the {@link Singletons} machinery; not called directly. */
    public Jimple( Singletons.Global g ) {}
    /** Returns the singleton instance of this class, as managed by {@link G}. */
    public static Jimple v() { return G.v().soot_jimple_Jimple(); }
    // Jimple statement/expression keywords used by the printer and parser --
    public final static String NEWARRAY = "newarray";
    public final static String NEWMULTIARRAY = "newmultiarray";
    public final static String NOP = "nop";
    public final static String RET = "ret";
    public final static String SPECIALINVOKE = "specialinvoke";
    public final static String STATICINVOKE = "staticinvoke";
    public final static String TABLESWITCH = "tableswitch";
    public final static String VIRTUALINVOKE = "virtualinvoke";
    public final static String NULL_TYPE = "null_type";
    public final static String UNKNOWN = "unknown";
    public final static String CMP = "cmp";
    public final static String CMPG = "cmpg";
    public final static String CMPL = "cmpl";
    public final static String ENTERMONITOR = "entermonitor";
    public final static String EXITMONITOR = "exitmonitor";
    public final static String INTERFACEINVOKE = "interfaceinvoke";
    public final static String LENGTHOF = "lengthof";
    public final static String LOOKUPSWITCH = "lookupswitch";
    public final static String NEG = "neg";
    public final static String IF = "if";
    // Java language keywords and modifiers that are also reserved in Jimple --
    public final static String ABSTRACT = "abstract";
    public final static String BOOLEAN = "boolean";
    public final static String BREAK = "break";
    public final static String BYTE = "byte";
    public final static String CASE = "case";
    public final static String CATCH = "catch";
    public final static String CHAR = "char";
    public final static String CLASS = "class";
    public final static String FINAL = "final";
    public final static String NATIVE = "native";
    public final static String PUBLIC = "public";
    public final static String PROTECTED = "protected";
    public final static String PRIVATE = "private";
    public final static String STATIC = "static";
    public final static String SYNCHRONIZED = "synchronized";
    public final static String TRANSIENT = "transient";
    public final static String VOLATILE = "volatile";
    public final static String INTERFACE = "interface";
    public final static String VOID = "void";
    public final static String SHORT = "short";
    public final static String INT = "int";
    public final static String LONG = "long";
    public final static String FLOAT = "float";
    public final static String DOUBLE = "double";
    public final static String EXTENDS = "extends";
    public final static String IMPLEMENTS = "implements";
    public final static String BREAKPOINT = "breakpoint";
    public final static String DEFAULT = "default";
    public final static String GOTO = "goto";
    public final static String INSTANCEOF = "instanceof";
    public final static String NEW = "new";
    public final static String RETURN = "return";
    public final static String THROW = "throw";
    public final static String THROWS = "throws";
    public final static String NULL = "null";
public static boolean isJavaKeywordType(Type t)
{
return !(t instanceof StmtAddressType ||
t instanceof UnknownType ||
t instanceof RefType ||
(t instanceof ArrayType && (!isJavaKeywordType(((ArrayType)t).baseType))) ||
t instanceof ErroneousType );
}
public static Value cloneIfNecessary(Value val)
{
if( val instanceof Local || val instanceof Constant )
return val;
else
return (Value) val.clone();
}
/**
Constructs a XorExpr(Immediate, Immediate) grammar chunk.
*/
public XorExpr newXorExpr(Value op1, Value op2)
{
return new JXorExpr(op1, op2);
}
/**
Constructs a UshrExpr(Immediate, Immediate) grammar chunk.
*/
public UshrExpr newUshrExpr(Value op1, Value op2)
{
return new JUshrExpr(op1, op2);
}
/**
Constructs a SubExpr(Immediate, Immediate) grammar chunk.
*/
public SubExpr newSubExpr(Value op1, Value op2)
{
return new JSubExpr(op1, op2);
}
/**
Constructs a ShrExpr(Immediate, Immediate) grammar chunk.
*/
public ShrExpr newShrExpr(Value op1, Value op2)
{
return new JShrExpr(op1, op2);
}
/**
Constructs a ShlExpr(Immediate, Immediate) grammar chunk.
*/
public ShlExpr newShlExpr(Value op1, Value op2)
{
return new JShlExpr(op1, op2);
}
/**
Constructs a RemExpr(Immediate, Immediate) grammar chunk.
*/
public RemExpr newRemExpr(Value op1, Value op2)
{
return new JRemExpr(op1, op2);
}
/**
Constructs a OrExpr(Immediate, Immediate) grammar chunk.
*/
public OrExpr newOrExpr(Value op1, Value op2)
{
return new JOrExpr(op1, op2);
}
/**
Constructs a NeExpr(Immediate, Immediate) grammar chunk.
*/
public NeExpr newNeExpr(Value op1, Value op2)
{
return new JNeExpr(op1, op2);
}
/**
Constructs a MulExpr(Immediate, Immediate) grammar chunk.
*/
public MulExpr newMulExpr(Value op1, Value op2)
{
return new JMulExpr(op1, op2);
}
/**
Constructs a LeExpr(Immediate, Immediate) grammar chunk.
*/
public LeExpr newLeExpr(Value op1, Value op2)
{
return new JLeExpr(op1, op2);
}
/**
Constructs a GeExpr(Immediate, Immediate) grammar chunk.
*/
public GeExpr newGeExpr(Value op1, Value op2)
{
return new JGeExpr(op1, op2);
}
/**
Constructs a EqExpr(Immediate, Immediate) grammar chunk.
*/
public EqExpr newEqExpr(Value op1, Value op2)
{
return new JEqExpr(op1, op2);
}
/**
Constructs a DivExpr(Immediate, Immediate) grammar chunk.
*/
public DivExpr newDivExpr(Value op1, Value op2)
{
return new JDivExpr(op1, op2);
}
/**
Constructs a CmplExpr(Immediate, Immediate) grammar chunk.
*/
public CmplExpr newCmplExpr(Value op1, Value op2)
{
return new JCmplExpr(op1, op2);
}
/**
Constructs a CmpgExpr(Immediate, Immediate) grammar chunk.
*/
public CmpgExpr newCmpgExpr(Value op1, Value op2)
{
return new JCmpgExpr(op1, op2);
}
/**
Constructs a CmpExpr(Immediate, Immediate) grammar chunk.
*/
public CmpExpr newCmpExpr(Value op1, Value op2)
{
return new JCmpExpr(op1, op2);
}
/**
Constructs a GtExpr(Immediate, Immediate) grammar chunk.
*/
public GtExpr newGtExpr(Value op1, Value op2)
{
return new JGtExpr(op1, op2);
}
/**
Constructs a LtExpr(Immediate, Immediate) grammar chunk.
*/
public LtExpr newLtExpr(Value op1, Value op2)
{
return new JLtExpr(op1, op2);
}
/**
Constructs a AddExpr(Immediate, Immediate) grammar chunk.
*/
public AddExpr newAddExpr(Value op1, Value op2)
{
return new JAddExpr(op1, op2);
}
/**
Constructs a AndExpr(Immediate, Immediate) grammar chunk.
*/
public AndExpr newAndExpr(Value op1, Value op2)
{
return new JAndExpr(op1, op2);
}
/**
Constructs a NegExpr(Immediate, Immediate) grammar chunk.
*/
public NegExpr newNegExpr(Value op)
{
return new JNegExpr(op);
}
/**
Constructs a LengthExpr(Immediate) grammar chunk.
*/
public LengthExpr newLengthExpr(Value op)
{
return new JLengthExpr(op);
}
/**
Constructs a CastExpr(Immediate, Type) grammar chunk.
*/
public CastExpr newCastExpr(Value op1, Type t)
{
return new JCastExpr(op1, t);
}
/**
Constructs a InstanceOfExpr(Immediate, Type)
grammar chunk.
*/
public InstanceOfExpr newInstanceOfExpr(Value op1, Type t)
{
return new JInstanceOfExpr(op1, t);
}
/**
Constructs a NewExpr(RefType) grammar chunk.
*/
public NewExpr newNewExpr(RefType type)
{
return new JNewExpr(type);
}
/**
Constructs a NewArrayExpr(Type, Immediate) grammar chunk.
*/
public NewArrayExpr newNewArrayExpr(Type type, Value size)
{
return new JNewArrayExpr(type, size);
}
/**
Constructs a NewMultiArrayExpr(ArrayType, List of Immediate) grammar chunk.
*/
public NewMultiArrayExpr newNewMultiArrayExpr(ArrayType type, List sizes)
{
return new JNewMultiArrayExpr(type, sizes);
}
/**
Constructs a NewStaticInvokeExpr(ArrayType, List of Immediate) grammar chunk.
*/
public StaticInvokeExpr newStaticInvokeExpr(SootMethod method, List args)
{
return new JStaticInvokeExpr(method, args);
}
/**
Constructs a NewSpecialInvokeExpr(Local base, SootMethod method, List of Immediate) grammar chunk.
*/
public SpecialInvokeExpr newSpecialInvokeExpr(Local base, SootMethod method, List args)
{
return new JSpecialInvokeExpr(base, method, args);
}
/**
Constructs a NewVirtualInvokeExpr(Local base, SootMethod method, List of Immediate) grammar chunk.
*/
public VirtualInvokeExpr newVirtualInvokeExpr(Local base, SootMethod method, List args)
{
return new JVirtualInvokeExpr(base, method, args);
}
/**
Constructs a NewInterfaceInvokeExpr(Local base, SootMethod method, List of Immediate) grammar chunk.
*/
public InterfaceInvokeExpr newInterfaceInvokeExpr(Local base, SootMethod method, List args)
{
return new JInterfaceInvokeExpr(base, method, args);
}
/**
Constructs a NewStaticInvokeExpr(ArrayType, List of Immediate) grammar chunk. (no args)
*/
public StaticInvokeExpr newStaticInvokeExpr(SootMethod method)
{
return new JStaticInvokeExpr(method, new ArrayList());
}
/**
Constructs a NewSpecialInvokeExpr(Local base, SootMethod method, List of Immediate) grammar chunk. (no args)
*/
public SpecialInvokeExpr newSpecialInvokeExpr(Local base, SootMethod method)
{
return new JSpecialInvokeExpr(base, method, new ArrayList());
}
/**
 * Constructs a VirtualInvokeExpr(Local base, SootMethod method, List of Immediate)
 * grammar chunk with an empty argument list.
 */
public VirtualInvokeExpr newVirtualInvokeExpr(Local base, SootMethod method)
{
return new JVirtualInvokeExpr(base, method, new ArrayList());
}
/**
 * Constructs an InterfaceInvokeExpr(Local base, SootMethod method, List of Immediate)
 * grammar chunk with an empty argument list.
 */
public InterfaceInvokeExpr newInterfaceInvokeExpr(Local base, SootMethod method)
{
return new JInterfaceInvokeExpr(base, method, new ArrayList());
}
/**
 * Constructs a StaticInvokeExpr(SootMethod method, List of Immediate) grammar chunk
 * with a single argument.
 */
public StaticInvokeExpr newStaticInvokeExpr(SootMethod method, Value arg)
{
return new JStaticInvokeExpr(method, Arrays.asList(new Value[] {arg}));
}
/**
 * Constructs a SpecialInvokeExpr(Local base, SootMethod method, List of Immediate)
 * grammar chunk with a single argument.
 */
public SpecialInvokeExpr newSpecialInvokeExpr(Local base, SootMethod method, Value arg)
{
return new JSpecialInvokeExpr(base, method, Arrays.asList(new Value[] {arg}));
}
/**
 * Constructs a VirtualInvokeExpr(Local base, SootMethod method, List of Immediate)
 * grammar chunk with a single argument.
 */
public VirtualInvokeExpr newVirtualInvokeExpr(Local base, SootMethod method, Value arg)
{
return new JVirtualInvokeExpr(base, method, Arrays.asList(new Value[] {arg}));
}
/**
 * Constructs an InterfaceInvokeExpr(Local base, SootMethod method, List of Immediate)
 * grammar chunk with a single argument.
 */
public InterfaceInvokeExpr newInterfaceInvokeExpr(Local base, SootMethod method, Value arg)
{
return new JInterfaceInvokeExpr(base, method, Arrays.asList(new Value[] {arg}));
}
/**
 * Constructs a StaticInvokeExpr(SootMethod method, List of Immediate) grammar chunk
 * with exactly two arguments.
 */
public StaticInvokeExpr newStaticInvokeExpr(SootMethod method, Value arg1, Value arg2)
{
return new JStaticInvokeExpr(method, Arrays.asList(new Value[] {arg1, arg2}));
}
/**
 * Constructs a SpecialInvokeExpr(Local base, SootMethod method, List of Immediate)
 * grammar chunk with exactly two arguments.
 */
public SpecialInvokeExpr newSpecialInvokeExpr(Local base, SootMethod method, Value arg1, Value arg2)
{
return new JSpecialInvokeExpr(base, method, Arrays.asList(new Value[] {arg1, arg2}));
}
/**
 * Constructs a VirtualInvokeExpr(Local base, SootMethod method, List of Immediate)
 * grammar chunk with exactly two arguments.
 */
public VirtualInvokeExpr newVirtualInvokeExpr(Local base, SootMethod method, Value arg1, Value arg2)
{
return new JVirtualInvokeExpr(base, method, Arrays.asList(new Value[] {arg1, arg2}));
}
/**
 * Constructs an InterfaceInvokeExpr(Local base, SootMethod method, List of Immediate)
 * grammar chunk with exactly two arguments.
 */
public InterfaceInvokeExpr newInterfaceInvokeExpr(Local base, SootMethod method, Value arg1, Value arg2)
{
return new JInterfaceInvokeExpr(base, method, Arrays.asList(new Value[] {arg1, arg2}));
}
/**
 * Constructs a ThrowStmt(Immediate) grammar chunk.
 */
public ThrowStmt newThrowStmt(Value op)
{
return new JThrowStmt(op);
}
/**
 * Constructs an ExitMonitorStmt(Immediate) grammar chunk.
 */
public ExitMonitorStmt newExitMonitorStmt(Value op)
{
return new JExitMonitorStmt(op);
}
/**
 * Constructs an EnterMonitorStmt(Immediate) grammar chunk.
 */
public EnterMonitorStmt newEnterMonitorStmt(Value op)
{
return new JEnterMonitorStmt(op);
}
/**
 * Constructs a BreakpointStmt() grammar chunk.
 */
public BreakpointStmt newBreakpointStmt()
{
return new JBreakpointStmt();
}
/**
 * Constructs a GotoStmt(Stmt) grammar chunk targeting the given unit directly.
 */
public GotoStmt newGotoStmt(Unit target)
{
return new JGotoStmt(target);
}
/**
 * Constructs a GotoStmt whose target is supplied indirectly through a UnitBox,
 * allowing the target to be filled in or retargeted later.
 */
public GotoStmt newGotoStmt(UnitBox stmtBox)
{
return new JGotoStmt(stmtBox);
}
/**
 * Constructs a NopStmt() grammar chunk.
 */
public NopStmt newNopStmt()
{
return new JNopStmt();
}
/**
 * Constructs a ReturnVoidStmt() grammar chunk.
 */
public ReturnVoidStmt newReturnVoidStmt()
{
return new JReturnVoidStmt();
}
/**
 * Constructs a ReturnStmt(Immediate) grammar chunk returning the given value.
 */
public ReturnStmt newReturnStmt(Value op)
{
return new JReturnStmt(op);
}
/**
 * Constructs a RetStmt(Local) grammar chunk returning to the given statement address.
 */
public RetStmt newRetStmt(Value stmtAddress)
{
return new JRetStmt(stmtAddress);
}
/**
 * Constructs an IfStmt(Condition, Stmt) grammar chunk with a direct branch target.
 */
public IfStmt newIfStmt(Value condition, Unit target)
{
return new JIfStmt(condition, target);
}
/**
 * Constructs an IfStmt whose branch target is supplied indirectly through a
 * UnitBox, allowing the target to be filled in or retargeted later.
 */
public IfStmt newIfStmt(Value condition, UnitBox target)
{
return new JIfStmt(condition, target);
}
/**
 * Constructs an IdentityStmt(Local, IdentityRef) grammar chunk.
 */
public IdentityStmt newIdentityStmt(Value local, Value identityRef)
{
return new JIdentityStmt(local, identityRef);
}
/**
 * Constructs an AssignStmt(Variable, RValue) grammar chunk.
 */
public AssignStmt newAssignStmt(Value variable, Value rvalue)
{
return new JAssignStmt(variable, rvalue);
}
/**
 * Constructs an InvokeStmt(InvokeExpr) grammar chunk.
 */
public InvokeStmt newInvokeStmt(Value op)
{
return new JInvokeStmt(op);
}
/**
 * Constructs a TableSwitchStmt(Immediate, int, int, List of Unit, Stmt) grammar
 * chunk with direct branch targets.
 */
public TableSwitchStmt newTableSwitchStmt(Value key, int lowIndex, int highIndex, List targets, Unit defaultTarget)
{
return new JTableSwitchStmt(key, lowIndex, highIndex, targets, defaultTarget);
}
/**
 * Constructs a TableSwitchStmt whose default target is supplied indirectly
 * through a UnitBox.
 */
public TableSwitchStmt newTableSwitchStmt(Value key, int lowIndex, int highIndex, List targets, UnitBox defaultTarget)
{
return new JTableSwitchStmt(key, lowIndex, highIndex, targets, defaultTarget);
}
/**
 * Constructs a LookupSwitchStmt(Immediate, List of Immediate, List of Unit, Stmt)
 * grammar chunk with direct branch targets.
 */
public LookupSwitchStmt newLookupSwitchStmt(Value key, List lookupValues, List targets, Unit defaultTarget)
{
return new JLookupSwitchStmt(key, lookupValues, targets, defaultTarget);
}
/**
 * Constructs a LookupSwitchStmt whose default target is supplied indirectly
 * through a UnitBox.
 */
public LookupSwitchStmt newLookupSwitchStmt(Value key, List lookupValues, List targets, UnitBox defaultTarget)
{
return new JLookupSwitchStmt(key, lookupValues, targets, defaultTarget);
}
/**
 * Constructs a Local with the given name and type.
 */
public Local newLocal(String name, Type t)
{
return new JimpleLocal(name, t);
}
/**
 * Constructs a new JTrap for the given exception type, covering the Unit range
 * [beginStmt, endStmt) with handlerStmt as the handler entry point.
 */
public Trap newTrap(SootClass exception, Unit beginStmt, Unit endStmt, Unit handlerStmt)
{
return new JTrap(exception, beginStmt, endStmt, handlerStmt);
}
/**
 * Constructs a new JTrap whose boundary and handler units are supplied
 * indirectly through UnitBoxes, allowing them to be retargeted later.
 */
public Trap newTrap(SootClass exception, UnitBox beginStmt, UnitBox endStmt, UnitBox handlerStmt)
{
return new JTrap(exception, beginStmt, endStmt, handlerStmt);
}
/**
 * Constructs a StaticFieldRef(SootField) grammar chunk.
 */
public StaticFieldRef newStaticFieldRef(SootField f)
{
return new StaticFieldRef(f);
}
/**
 * Constructs a ThisRef(RefType) grammar chunk.
 */
public ThisRef newThisRef(RefType t)
{
return new ThisRef(t);
}
/**
 * Constructs a ParameterRef(Type, int) grammar chunk referring to the
 * zero-based parameter at position {@code number}.
 */
public ParameterRef newParameterRef(Type paramType, int number)
{
return new ParameterRef(paramType, number);
}
/**
 * Constructs an InstanceFieldRef(Local, SootField) grammar chunk.
 */
public InstanceFieldRef newInstanceFieldRef(Value base, SootField f)
{
return new JInstanceFieldRef(base, f);
}
/**
 * Constructs a CaughtExceptionRef() grammar chunk.
 */
public CaughtExceptionRef newCaughtExceptionRef()
{
return new JCaughtExceptionRef();
}
/**
 * Constructs an ArrayRef(Local, Immediate) grammar chunk.
 */
public ArrayRef newArrayRef(Value base, Value index)
{
return new JArrayRef(base, index);
}
/** Constructs a box for a variable (assignment target) position.
    Note: This is NOT used to create the variable box in JAssignStmt. */
public ValueBox newVariableBox(Value value)
{
return new VariableBox(value);
}
/** Constructs a box constrained to hold a Local. */
public ValueBox newLocalBox(Value value)
{
return new JimpleLocalBox(value);
}
/** Constructs a box for an rvalue position.
    Note: This is NOT used to create the rvalue box in JAssignStmt. */
public ValueBox newRValueBox(Value value)
{
return new RValueBox(value);
}
/** Constructs a box constrained to hold an Immediate. */
public ValueBox newImmediateBox(Value value)
{
return new ImmediateBox(value);
}
/** Constructs a box for an invocation-argument position (backed by an ImmediateBox). */
public ValueBox newArgBox(Value value)
{
return new ImmediateBox(value);
}
/** Constructs a box constrained to hold an IdentityRef. */
public ValueBox newIdentityRefBox(Value value)
{
return new IdentityRefBox(value);
}
/** Constructs a box constrained to hold a condition expression. */
public ValueBox newConditionExprBox(Value value)
{
return new ConditionExprBox(value);
}
/** Constructs a box constrained to hold an InvokeExpr. */
public ValueBox newInvokeExprBox(Value value)
{
return new InvokeExprBox(value);
}
/** Constructs a box holding the given unit.
    Casts to Stmt, so a non-Stmt Unit causes a ClassCastException here. */
public UnitBox newStmtBox(Unit unit)
{
return new StmtBox((Stmt) unit);
}
/** Returns an empty JimpleBody associated with method m. */
public JimpleBody newBody(SootMethod m)
{
return new JimpleBody(m);
}
/** Returns an empty JimpleBody with no associated method. */
public JimpleBody newBody()
{
return new JimpleBody();
}
/* ------------------------------------------------------------------
 * Convenience overloads taking unresolved SootFieldRef / SootMethodRef
 * handles. Each resolves the reference eagerly and delegates to the
 * corresponding SootField / SootMethod factory above; resolution
 * failures therefore surface here rather than at use time.
 * ------------------------------------------------------------------ */
/** Resolves the field reference and delegates to newStaticFieldRef(SootField). */
public StaticFieldRef newStaticFieldRef(SootFieldRef f) {
return newStaticFieldRef(f.resolve());
}
/** Resolves the field reference and delegates to newInstanceFieldRef(Value, SootField). */
public InstanceFieldRef newInstanceFieldRef(Value base, SootFieldRef f) {
return newInstanceFieldRef(base, f.resolve());
}
/** Resolves the method reference; static invoke with an argument list. */
public StaticInvokeExpr newStaticInvokeExpr(SootMethodRef method, List args) {
return newStaticInvokeExpr(method.resolve(), args);
}
/** Resolves the method reference; special invoke with an argument list. */
public SpecialInvokeExpr newSpecialInvokeExpr(Local base, SootMethodRef method, List args) {
return newSpecialInvokeExpr(base, method.resolve(), args);
}
/** Resolves the method reference; virtual invoke with an argument list. */
public VirtualInvokeExpr newVirtualInvokeExpr(Local base, SootMethodRef method, List args) {
return newVirtualInvokeExpr(base, method.resolve(), args);
}
/** Resolves the method reference; interface invoke with an argument list. */
public InterfaceInvokeExpr newInterfaceInvokeExpr(Local base, SootMethodRef method, List args) {
return newInterfaceInvokeExpr(base, method.resolve(), args);
}
/** Resolves the method reference; static invoke with no arguments. */
public StaticInvokeExpr newStaticInvokeExpr(SootMethodRef method) {
return newStaticInvokeExpr(method.resolve());
}
/** Resolves the method reference; special invoke with no arguments. */
public SpecialInvokeExpr newSpecialInvokeExpr(Local base, SootMethodRef method) {
return newSpecialInvokeExpr(base, method.resolve());
}
/** Resolves the method reference; virtual invoke with no arguments. */
public VirtualInvokeExpr newVirtualInvokeExpr(Local base, SootMethodRef method) {
return newVirtualInvokeExpr(base, method.resolve());
}
/** Resolves the method reference; interface invoke with no arguments. */
public InterfaceInvokeExpr newInterfaceInvokeExpr(Local base, SootMethodRef method) {
return newInterfaceInvokeExpr(base, method.resolve());
}
/** Resolves the method reference; static invoke with a single argument. */
public StaticInvokeExpr newStaticInvokeExpr(SootMethodRef method, Value arg) {
return newStaticInvokeExpr(method.resolve(), arg);
}
/** Resolves the method reference; special invoke with a single argument. */
public SpecialInvokeExpr newSpecialInvokeExpr(Local base, SootMethodRef method, Value arg) {
return newSpecialInvokeExpr(base, method.resolve(), arg);
}
/** Resolves the method reference; virtual invoke with a single argument. */
public VirtualInvokeExpr newVirtualInvokeExpr(Local base, SootMethodRef method, Value arg) {
return newVirtualInvokeExpr(base, method.resolve(), arg);
}
/** Resolves the method reference; interface invoke with a single argument. */
public InterfaceInvokeExpr newInterfaceInvokeExpr(Local base, SootMethodRef method, Value arg) {
return newInterfaceInvokeExpr(base, method.resolve(), arg);
}
/** Resolves the method reference; static invoke with exactly two arguments. */
public StaticInvokeExpr newStaticInvokeExpr(SootMethodRef method, Value arg1, Value arg2) {
return newStaticInvokeExpr(method.resolve(), arg1, arg2);
}
/** Resolves the method reference; special invoke with exactly two arguments. */
public SpecialInvokeExpr newSpecialInvokeExpr(Local base, SootMethodRef method, Value arg1, Value arg2) {
return newSpecialInvokeExpr(base, method.resolve(), arg1, arg2);
}
/** Resolves the method reference; virtual invoke with exactly two arguments. */
public VirtualInvokeExpr newVirtualInvokeExpr(Local base, SootMethodRef method, Value arg1, Value arg2) {
return newVirtualInvokeExpr(base, method.resolve(), arg1, arg2);
}
/** Resolves the method reference; interface invoke with exactly two arguments. */
public InterfaceInvokeExpr newInterfaceInvokeExpr(Local base, SootMethodRef method, Value arg1, Value arg2) {
return newInterfaceInvokeExpr(base, method.resolve(), arg1, arg2);
}
} |
package gov.nih.nci.caadapter.ui.common.preferences;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.util.HashMap;
/**
 * Singleton accessor for caAdapter user preferences, persisted as a
 * serialized {@link HashMap} in the file {@code <user.home>/.caadapter}.
 *
 * Fixes over the previous revision:
 * - streams are now closed (the ObjectOutputStream was never closed, so its
 *   internal buffer could be lost and the preference file silently truncated);
 * - the file path is built with {@link java.io.File#File(String, String)}
 *   instead of a hard-coded {@code "\\"} separator, which only worked on Windows.
 */
public class CaAdapterPref {
private static CaAdapterPref caAdapterPref = null;
// Preference map loaded from disk; null when the file is absent or unreadable.
private HashMap caAdapterPropHashMap = null;
// NOTE(review): "LEVLE" / "leve 1" look like typos, but both the field names
// and the string values are part of the public contract (persisted to disk and
// compared by callers), so they are intentionally left unchanged.
public static String VALIDATION_PERFORMANCE_LEVLE_0="level 0";
public static String VALIDATION_PERFORMANCE_LEVLE_1="leve 1";
public static String VALIDATION_PERFORMANCE_LEVLE_2="level 2";
/** Returns the preference map held by this instance (may be null). */
private HashMap getcaAdapterPref() {
return caAdapterPropHashMap;
}
/**
 * Returns the persisted preference map, loading it from disk on first access.
 *
 * @return the preference map, or null if the preference file does not exist
 *         or could not be read
 */
public static synchronized HashMap getCaAdapterPreferences() {
if (caAdapterPref == null) {
caAdapterPref = new CaAdapterPref();
}
return caAdapterPref.getcaAdapterPref();
}
/** Loads the serialized preference map from {@code <user.home>/.caadapter}. */
private CaAdapterPref() {
ObjectInputStream objIn = null;
try {
FileInputStream fileIn = new FileInputStream(
new java.io.File(System.getProperty("user.home"), ".caadapter"));
objIn = new ObjectInputStream(fileIn);
caAdapterPropHashMap = (HashMap) objIn.readObject();
} catch (Exception e) {
// Best effort: a missing/corrupt preference file just yields null.
caAdapterPropHashMap = null;
e.printStackTrace();
} finally {
if (objIn != null) {
try {
objIn.close();
} catch (Exception ignored) {
// nothing sensible to do on close failure
}
}
}
}
/**
 * Persists the given preference map to {@code <user.home>/.caadapter}.
 * Errors are reported on stderr but not propagated (best effort).
 *
 * @param mapFromApplication the preference key/value map to serialize
 */
public static synchronized void setCaAdapterPreferences(HashMap mapFromApplication) {
ObjectOutputStream objOut = null;
try {
FileOutputStream fileOut = new FileOutputStream(
new java.io.File(System.getProperty("user.home"), ".caadapter"));
objOut = new ObjectOutputStream(fileOut);
objOut.writeObject(mapFromApplication);
} catch (Exception e) {
e.printStackTrace();
} finally {
// Closing flushes ObjectOutputStream's internal buffer; without this the
// written file could be incomplete.
if (objOut != null) {
try {
objOut.close();
} catch (Exception ignored) {
// nothing sensible to do on close failure
}
}
}
}
}
/**
* Change History
* $Log: not supported by cvs2svn $
* Revision 1.3 2007/08/17 15:15:25 jayannah
* Reformatted and added the Comments and the log tags for all the files
*
*/ |
package com.subgraph.orchid.circuits.hs;
import java.io.StringReader;
import com.subgraph.orchid.TorParsingException;
import com.subgraph.orchid.crypto.TorSignature;
import com.subgraph.orchid.directory.DocumentFieldParserImpl;
import com.subgraph.orchid.directory.parsing.DocumentFieldParser;
import com.subgraph.orchid.directory.parsing.DocumentObject;
import com.subgraph.orchid.directory.parsing.DocumentParser;
import com.subgraph.orchid.directory.parsing.DocumentParsingHandler;
import com.subgraph.orchid.directory.parsing.DocumentParsingResultHandler;
import com.subgraph.orchid.encoders.Base64;
public class HSDescriptorParser implements DocumentParser<HSDescriptor>{
private final DocumentFieldParser fieldParser;
private final HSDescriptor descriptor;
private DocumentParsingResultHandler<HSDescriptor> resultHandler;
public HSDescriptorParser(DocumentFieldParser fieldParser) {
this.fieldParser = fieldParser;
this.fieldParser.setHandler(createParsingHandler());
this.descriptor = new HSDescriptor();
}
private DocumentParsingHandler createParsingHandler() {
return new DocumentParsingHandler() {
public void parseKeywordLine() {
processKeywordLine();
}
public void endOfDocument() {
}
};
}
public boolean parse(DocumentParsingResultHandler<HSDescriptor> resultHandler) {
this.resultHandler = resultHandler;
fieldParser.startSignedEntity();
try {
fieldParser.processDocument();
return true;
} catch(TorParsingException e) {
resultHandler.parsingError(e.getMessage());
return false;
}
}
private void processKeywordLine() {
final HSDescriptorKeyword keyword = HSDescriptorKeyword.findKeyword(fieldParser.getCurrentKeyword());
if(!keyword.equals(HSDescriptorKeyword.UNKNOWN_KEYWORD)) {
processKeyword(keyword);
}
}
private void processKeyword(HSDescriptorKeyword keyword) {
switch(keyword) {
case RENDEZVOUS_SERVICE_DESCRIPTOR:
descriptor.setDescriptorId(fieldParser.parseBase32Digest());
break;
case VERSION:
if(fieldParser.parseInteger() != 2) {
throw new TorParsingException("Unexpected Descriptor version");
}
break;
case PERMANENT_KEY:
descriptor.setPermanentKey(fieldParser.parsePublicKey());
break;
case SECRET_ID_PART:
descriptor.setSecretIdPart(fieldParser.parseBase32Digest());
break;
case PUBLICATION_TIME:
descriptor.setPublicationTime(fieldParser.parseTimestamp());
break;
case PROTOCOL_VERSIONS:
descriptor.setProtocolVersions(fieldParser.parseIntegerList());
break;
case INTRODUCTION_POINTS:
processIntroductionPoints();
break;
case SIGNATURE:
processSignature();
break;
case UNKNOWN_KEYWORD:
break;
}
}
private void processIntroductionPoints() {
final DocumentObject ob = fieldParser.parseObject();
final String decoded = new String(Base64.decode(ob.getContent(false)));
System.out.println(decoded);
final StringReader reader = new StringReader(decoded);
final IntroductionPointParser parser = new IntroductionPointParser(new DocumentFieldParserImpl(reader));
parser.parse(new DocumentParsingResultHandler<IntroductionPoint>() {
public void documentParsed(IntroductionPoint document) {
descriptor.addIntroductionPoint(document);
}
public void documentInvalid(IntroductionPoint document, String message) {
// TODO Auto-generated method stub
}
public void parsingError(String message) {
// TODO Auto-generated method stub
}
});
}
private void processSignature() {
fieldParser.endSignedEntity();
final TorSignature signature = fieldParser.parseSignature();
if(!fieldParser.verifySignedEntity(descriptor.getPermanentKey(), signature)) {
resultHandler.documentInvalid(descriptor, "Signature verification failed");
fieldParser.logWarn("Signature failed for descriptor: "+ descriptor.getDescriptorId().toBase32());
return;
}
System.out.println("SIGNATURE OK");
resultHandler.documentParsed(descriptor);
}
} |
package org.voltdb.iv2;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.List;
import org.voltcore.logging.Level;
import org.voltcore.messaging.Mailbox;
import org.voltdb.DependencyPair;
import org.voltdb.exceptions.EEException;
import org.voltdb.exceptions.SQLException;
import org.voltdb.LoadedProcedureSet;
import org.voltdb.messaging.FastDeserializer;
import org.voltdb.messaging.FragmentResponseMessage;
import org.voltdb.messaging.FragmentTaskMessage;
import org.voltdb.ParameterSet;
import org.voltdb.ProcedureRunner;
import org.voltdb.SiteProcedureConnection;
import org.voltdb.utils.LogKeys;
import org.voltdb.VoltDB;
import org.voltdb.VoltTable;
public class SysprocFragmentTask extends TransactionTask
{
final Mailbox m_initiator;
final FragmentTaskMessage m_task;
final LoadedProcedureSet m_loadedProcSet;
SysprocFragmentTask(Mailbox mailbox,
ParticipantTransactionState txn,
FragmentTaskMessage message,
LoadedProcedureSet procs)
{
super(txn);
m_initiator = mailbox;
m_task = message;
m_loadedProcSet = procs;
assert(m_task.isSysProcTask());
}
@Override
public void run(SiteProcedureConnection siteConnection)
{
if (!m_txn.isReadOnly()) {
if (m_txn.getBeginUndoToken() == Site.kInvalidUndoToken) {
m_txn.setBeginUndoToken(siteConnection.getLatestUndoToken());
}
}
final FragmentResponseMessage response = processFragmentTask(siteConnection);
m_initiator.deliver(response);
}
// Extracted the sysproc portion of ExecutionSite processFragmentTask(), then
// modifed to work in the new world
public FragmentResponseMessage processFragmentTask(SiteProcedureConnection siteConnection)
{
final FragmentResponseMessage currentFragResponse =
new FragmentResponseMessage(m_task, m_initiator.getHSId());
currentFragResponse.setStatus(FragmentResponseMessage.SUCCESS, null);
for (int frag = 0; frag < m_task.getFragmentCount(); frag++)
{
final long fragmentId = m_task.getFragmentId(frag);
final int outputDepId = m_task.getOutputDepId(frag);
// TODO: copy-paste from FragmentTask. DRY / helper needed.
// this is a horrible performance hack, and can be removed with small changes
// to the ee interface layer.. (rtb: not sure what 'this' encompasses...)
// (izzy: still not sure what 'this' encompasses...)
ParameterSet params = null;
final ByteBuffer paramData = m_task.getParameterDataForFragment(frag);
if (paramData != null) {
final FastDeserializer fds = new FastDeserializer(paramData);
try {
params = fds.readObject(ParameterSet.class);
}
catch (final IOException e) {
// IZZY: why not send a non-success response back to the
// MPI here?
hostLog.l7dlog(Level.FATAL,
LogKeys.host_ExecutionSite_FailedDeserializingParamsForFragmentTask.name(), e);
VoltDB.crashLocalVoltDB(e.getMessage(), true, e);
}
}
else {
params = new ParameterSet();
}
try {
// Find the sysproc to invoke.
ProcedureRunner runner = m_loadedProcSet.getSysproc(fragmentId);
// run the overloaded sysproc planfragment. pass an empty dependency
// set since remote (non-aggregator) fragments don't receive dependencies.
final DependencyPair dep
= runner.executePlanFragment(m_txn,
new HashMap<Integer, List<VoltTable>>(),
fragmentId,
params);
System.out.printf("SYSPROCFRAG: outputDepId(%d) depId(%d) table(%s)\n",
outputDepId, dep.depId, dep.dependency);
currentFragResponse.addDependency(dep.depId, dep.dependency);
} catch (final EEException e) {
hostLog.l7dlog( Level.TRACE, LogKeys.host_ExecutionSite_ExceptionExecutingPF.name(), new Object[] { fragmentId }, e);
currentFragResponse.setStatus(FragmentResponseMessage.UNEXPECTED_ERROR, e);
break;
} catch (final SQLException e) {
hostLog.l7dlog( Level.TRACE, LogKeys.host_ExecutionSite_ExceptionExecutingPF.name(), new Object[] { fragmentId }, e);
currentFragResponse.setStatus(FragmentResponseMessage.UNEXPECTED_ERROR, e);
break;
}
}
return currentFragResponse;
}
@Override
public long getMpTxnId()
{
return m_task.getTxnId();
}
} |
package edu.ucdenver.ccp.nlp.uima.serialization.xmi;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.zip.GZIPInputStream;
import org.apache.uima.UimaContext;
import org.apache.uima.analysis_engine.AnalysisEngine;
import org.apache.uima.analysis_engine.AnalysisEngineDescription;
import org.apache.uima.analysis_engine.AnalysisEngineProcessException;
import org.apache.uima.cas.impl.XmiCasDeserializer;
import org.apache.uima.cas.impl.XmiSerializationSharedData;
import org.apache.uima.fit.component.JCasAnnotator_ImplBase;
import org.apache.uima.fit.descriptor.ConfigurationParameter;
import org.apache.uima.fit.factory.AnalysisEngineFactory;
import org.apache.uima.jcas.JCas;
import org.apache.uima.resource.ResourceInitializationException;
import org.apache.uima.resource.metadata.TypeSystemDescription;
import org.apache.uima.util.Level;
import org.apache.uima.util.Logger;
import org.xml.sax.SAXException;
import edu.ucdenver.ccp.common.file.CharacterEncoding;
import edu.ucdenver.ccp.common.io.ClassPathUtil;
import edu.ucdenver.ccp.common.io.StreamUtil;
import edu.ucdenver.ccp.common.reflection.ConstructorUtil;
import edu.ucdenver.ccp.common.string.StringConstants;
import edu.ucdenver.ccp.nlp.uima.shims.ShimDefaults;
import edu.ucdenver.ccp.uima.shims.document.DocumentMetadataHandler;
/**
* This {@link AnalysisEngine} implementation is capable of loading UIMA XMI
* files. File can be loaded from the file system or classpath. Compressed (.gz)
* XMI files can be used.
*
* @author Colorado Computational Pharmacology, UC Denver;
* ccpsupport@ucdenver.edu
*
*/
public class XmiLoaderAE extends JCasAnnotator_ImplBase {
/**
* The XmiLoaderAE can load XMI files from either the classpath or the
* filesystem. This enum is used to indicate the location of the XMI files
* to load.
*
* @author Colorado Computational Pharmacology, UC Denver;
* ccpsupport@ucdenver.edu
*
*/
public enum XmiPathType {
/**
* Indicates the XMI files to load are on the classpath, therefore the
* xmiDirectoriesOrPaths input parameter represents paths on the
* classpath
*/
CLASSPATH,
/**
* Indicates the XMI files to load are on the file system, therefore the
* xmiDirectoriesOrPaths input parameter represents a file system
* directory
*/
FILESYSTEM
}
public static final String PARAM_XMI_PATH_TYPE = "xmiPathType";
@ConfigurationParameter(mandatory = true, description = "Indicates the path type for the values in the xmiPaths configuration parameter, CLASSPATH or FILESYSTEM")
private XmiPathType xmiPathType;
public enum XmiFileCompressionType {
/**
* Indicates that the XMI files are gzipped
*/
GZ,
/**
* Indicates that the XMI files are not compressed
*/
NONE
}
public static final String PARAM_XMI_FILE_COMPRESSION_TYPE = "xmiFileCompressionType";
@ConfigurationParameter(defaultValue = "NONE", description = "Indicates the compression type used to store the XMI files, GZ or NONE. This has ramifications on whether they are looked for using a .gz suffix or note, and how they are loaded.")
private XmiFileCompressionType xmiFileCompressionType;
/**
* Parameter name (mainly used in descriptor files) for the XMI input
* directory configuration parameter
*/
public static final String PARAM_XMI_PATH_NAMES = "xmiPaths";
/**
* The directory where XMI files to load are stored
*/
@ConfigurationParameter(mandatory = false, description = "The directory or classpath where the XMI files to load are stored. "
+ "If not set, then this AnalysisEngine will attempt to obtain the directory of the source file for this CAS. "
+ "See DocumentMetadataExtractor for details.")
private String[] xmiPaths;
public static final String PARAM_XMI_FILENAME_INFIXES = "infixes";
@ConfigurationParameter(mandatory = false, description = "An optional array of infixes to use when specifying which XMI files to load.")
private String[] infixes;
/**
* Parameter name used in the UIMA descriptor file for the token attribute
* extractor implementation to use
*/
public static final String PARAM_DOCUMENT_METADATA_HANDLER_CLASS = "documentMetadataHandlerClassName";
/**
* The name of the DocumentMetadataHandler implementation to use
*/
@ConfigurationParameter(mandatory = true, description = "name of the DocumentMetadataHandler implementation to use", defaultValue = ShimDefaults.CCP_DOCUMENT_METADATA_HANDLER_CLASS_NAME)
private String documentMetadataHandlerClassName;
/**
* this {@link DocumentMetadataHandler} will be initialized based on the
* class name specified by the documentMetadataExtractorClassName parameter
*/
private DocumentMetadataHandler documentMetaDataHandler;
private Logger logger;
/**
* If true, then an exception is thrown if an unknown UIMA type is observed
* during the deserialization process
*/
boolean THROW_EXCEPTION_ON_UNKNOWN_TYPE_OBSERVATION = true;
/*
* (non-Javadoc)
*
* @see
* org.apache.uima.analysis_component.AnalysisComponent_ImplBase#initialize(
* org.apache.uima. UimaContext)
*/
@Override
public void initialize(UimaContext aContext) throws ResourceInitializationException {
super.initialize(aContext);
logger = aContext.getLogger();
System.out.println("DOCUMENT META DATA EXTRACTOR CLASS: " + documentMetadataHandlerClassName);
documentMetaDataHandler = (DocumentMetadataHandler) ConstructorUtil
.invokeConstructor(documentMetadataHandlerClassName);
}
/*
* (non-Javadoc)
*
* @see
* org.apache.uima.analysis_component.JCasAnnotator_ImplBase#process(org.
* apache.uima.jcas.JCas)
*/
@Override
public void process(JCas jcas) throws AnalysisEngineProcessException {
XmiSerializationSharedData sharedData = new XmiSerializationSharedData();
String documentId = documentMetaDataHandler.extractDocumentId(jcas);
List<String> filePaths = new ArrayList<String>();
if (xmiPaths != null) {
filePaths.addAll(Arrays.asList(xmiPaths));
} else {
if (xmiPathType == XmiPathType.FILESYSTEM) {
/*
* extract the source document path from the CAS and use that as
* the directory to load from
*/
File sourceDocumentPath = documentMetaDataHandler.extractSourceDocumentPath(jcas);
if (sourceDocumentPath != null) {
filePaths.add(sourceDocumentPath.getParentFile().getAbsolutePath());
}
}
}
if (filePaths.isEmpty()) {
throw new AnalysisEngineProcessException(
"Unable to determine location path for XMI file loading. Parameters are: xmiPathType: "
+ xmiPathType.name() + " xmiPaths: "
+ ((xmiPaths == null) ? "null" : Arrays.toString(xmiPaths)) + " Source document path: "
+ documentMetaDataHandler.extractSourceDocumentPath(jcas),
null);
}
List<String> infixesToLoad = new ArrayList<String>();
if (infixes == null || infixes.length == 0) {
infixesToLoad.add(null);
} else {
infixesToLoad.addAll(Arrays.asList(infixes));
}
for (String xmiPathBase : filePaths) {
for (String infix : infixesToLoad) {
InputStream xmiStream = initializeXmiInputStream(documentId, xmiPathBase, infix);
if (xmiStream != null) {
try {
XmiCasDeserializer.deserialize(xmiStream, jcas.getCas(),
!THROW_EXCEPTION_ON_UNKNOWN_TYPE_OBSERVATION, sharedData, sharedData.getMaxXmiId());
xmiStream.close();
} catch (IOException e) {
throw new AnalysisEngineProcessException(e);
} catch (SAXException e) {
throw new AnalysisEngineProcessException(e);
}
}
}
}
}
/**
* @param documentId
* @param xmiPathBase
* @return
* @throws AnalysisEngineProcessException
*/
private InputStream initializeXmiInputStream(String documentId, String xmiPathBase, String infix)
throws AnalysisEngineProcessException {
InputStream xmiStream = null;
if (xmiPathType.equals(XmiPathType.FILESYSTEM)) {
xmiStream = getStreamFromFile(documentId, xmiPathBase, infix);
} else {
xmiStream = getStreamFromClasspath(documentId, xmiPathBase, infix);
}
if (xmiFileCompressionType.equals(XmiFileCompressionType.GZ)) {
try {
xmiStream = new GZIPInputStream(xmiStream);
} catch (IOException e) {
throw new AnalysisEngineProcessException(e);
}
}
return xmiStream;
}
/**
* @param documentId
* @param xmiPathBase
* @return
*/
private InputStream getStreamFromClasspath(String documentId, String xmiPathBase, String infix) {
InputStream xmiStream = null;
String xmiFilePath = xmiPathBase + StringConstants.FORWARD_SLASH
+ XmiPrinterAE.getXmiFileName(documentId, infix, xmiFileCompressionType == XmiFileCompressionType.GZ);
xmiStream = ClassPathUtil.getResourceStreamFromClasspath(getClass(), xmiFilePath);
if (xmiStream == null) {
logger.log(Level.WARNING, "Unable to load XMI file from classpath: " + xmiFilePath);
}
return xmiStream;
}
/**
* @param documentId
* @param xmiPathBase
* @param xmiStream
* @return
* @throws AnalysisEngineProcessException
*/
private InputStream getStreamFromFile(String documentId, String xmiPathBase, String infix)
throws AnalysisEngineProcessException {
InputStream xmiStream = null;
File xmiDirectory = new File(xmiPathBase);
String xmiFileName = XmiPrinterAE.getXmiFileName(documentId, infix,
(xmiFileCompressionType == XmiFileCompressionType.GZ));
File xmiFile = new File(xmiDirectory, xmiFileName);
if (!xmiFile.exists()) {
logger.log(Level.WARNING, "Expected XMI file does not exist: " + xmiFile.getAbsolutePath());
} else {
try {
xmiStream = StreamUtil.getEncodingSafeInputStream(xmiFile, CharacterEncoding.UTF_8);
} catch (FileNotFoundException e) {
throw new AnalysisEngineProcessException(e);
}
}
return xmiStream;
}
/**
* Returns an initialized XmiLoader {@link AnalysisEngine}
*
* @param documentMetaDataHandlerClass
* @param tsd
* @param xmiPathType
* @param xmiDirectories
* @return
* @throws ResourceInitializationException
*/
public static AnalysisEngineDescription createAnalysisEngineDescription(TypeSystemDescription tsd,
Class<? extends DocumentMetadataHandler> documentMetaDataHandlerClass, XmiPathType xmiPathType,
XmiFileCompressionType xmiCompressionType, List<String> infixes, String... xmiPaths)
throws ResourceInitializationException {
return AnalysisEngineFactory.createEngineDescription(XmiLoaderAE.class, tsd,
XmiLoaderAE.PARAM_DOCUMENT_METADATA_HANDLER_CLASS, documentMetaDataHandlerClass.getName(),
PARAM_XMI_PATH_TYPE, xmiPathType.name(), PARAM_XMI_FILE_COMPRESSION_TYPE, xmiCompressionType.name(),
XmiLoaderAE.PARAM_XMI_PATH_NAMES, xmiPaths, XmiLoaderAE.PARAM_XMI_FILENAME_INFIXES,
(infixes == null) ? null : infixes.toArray(new String[infixes.size()]));
}
public static AnalysisEngineDescription createAnalysisEngineDescription_LoadFromSourceFileDirectory(
TypeSystemDescription tsd, Class<? extends DocumentMetadataHandler> documentMetaDataHandlerClass,
XmiFileCompressionType xmiCompressionType, String... infixes) throws ResourceInitializationException {
return AnalysisEngineFactory.createEngineDescription(XmiLoaderAE.class, tsd,
XmiLoaderAE.PARAM_DOCUMENT_METADATA_HANDLER_CLASS, documentMetaDataHandlerClass.getName(),
PARAM_XMI_PATH_TYPE, XmiPathType.FILESYSTEM, PARAM_XMI_FILE_COMPRESSION_TYPE, xmiCompressionType.name(),
XmiLoaderAE.PARAM_XMI_FILENAME_INFIXES, infixes);
}
} |
package org.voltdb.utils;
import java.io.File;
import java.io.IOException;
import java.net.URL;
import java.nio.file.Paths;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.LinkedBlockingQueue;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.http.entity.ContentType;
import org.eclipse.jetty.http.HttpVersion;
import org.eclipse.jetty.security.ConstraintMapping;
import org.eclipse.jetty.security.ConstraintSecurityHandler;
import org.eclipse.jetty.server.Handler;
import org.eclipse.jetty.server.HttpConfiguration;
import org.eclipse.jetty.server.HttpConnectionFactory;
import org.eclipse.jetty.server.Request;
import org.eclipse.jetty.server.SecureRequestCustomizer;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.server.ServerConnector;
import org.eclipse.jetty.server.SslConnectionFactory;
import org.eclipse.jetty.server.handler.ResourceHandler;
import org.eclipse.jetty.util.security.Constraint;
import org.eclipse.jetty.util.ssl.SslContextFactory;
import org.eclipse.jetty.util.thread.QueuedThreadPool;
import org.voltcore.logging.VoltLogger;
import org.voltdb.HTTPClientInterface;
import org.voltdb.VoltDB;
import com.google_voltpatches.common.base.Charsets;
import com.google_voltpatches.common.io.Resources;
import com.google_voltpatches.common.net.HostAndPort;
import java.util.HashSet;
import java.util.Set;
import javax.servlet.SessionTrackingMode;
import org.eclipse.jetty.server.handler.ContextHandler;
import org.eclipse.jetty.server.handler.ContextHandlerCollection;
import org.eclipse.jetty.server.handler.gzip.GzipHandler;
import org.eclipse.jetty.server.session.DefaultSessionIdManager;
import org.eclipse.jetty.server.session.SessionHandler;
import org.eclipse.jetty.servlet.ServletContextHandler;
import org.eclipse.jetty.servlet.ServletHandler;
public class HTTPAdminListener {
private static final VoltLogger m_log = new VoltLogger("HOST");
// static resources
public static final String RESOURCE_BASE = "dbmonitor";
public static final String CSS_TARGET = "css";
public static final String IMAGES_TARGET = "images";
public static final String JS_TARGET = "js";
// content types
static final String JSON_CONTENT_TYPE = ContentType.APPLICATION_JSON.toString();
static final String HTML_CONTENT_TYPE = "text/html;charset=utf-8";
final Server m_server;
final DefaultSessionIdManager m_idmanager;
final SessionHandler m_sessionHandler = new SessionHandler();
final HTTPClientInterface httpClientInterface = new HTTPClientInterface();
boolean m_jsonEnabled;
Map<String, String> m_htmlTemplates = new HashMap<>();
final boolean m_mustListen;
String m_publicIntf;
/*
* Utility handler class to enable caching of static resources.
* The static resources are package in jar file
*/
class CacheStaticResourceHandler extends ResourceHandler {

    /**
     * @param target directory under the resource base folder (dbmonitor) to serve
     * @param maxAge cache lifetime in seconds advertised to HTTP clients
     */
    public CacheStaticResourceHandler(final String target, int maxAge) {
        super();
        // Classpath resource names always use '/' regardless of platform, so
        // File.separator must NOT be used here (it would break on Windows).
        final String path = VoltDB.class.getResource(RESOURCE_BASE + "/" + target).toExternalForm();
        if (m_log.isDebugEnabled()) {
            m_log.debug("Resource base path: " + path);
        }
        setResourceBase(path);
        // set etags along with cache age so that the http client's requests for
        // the static resources are rate limited. Without a cache age, clients
        // would re-request static resources more often than needed.
        setCacheControl("max-age=" + maxAge + ", private");
        setEtags(true);
    }

    @SuppressWarnings("unused")
    private CacheStaticResourceHandler() {
        super();
        assert false : "Target location for static resource is needed to initialize the resource handler";
    }

    @Override
    public void handle(String target,
                       Request baseRequest,
                       HttpServletRequest request,
                       HttpServletResponse response)
            throws IOException, ServletException {
        super.handle(target, baseRequest, request, response);
        // A request that falls through unhandled usually means a missing or renamed resource.
        if (!baseRequest.isHandled() && m_log.isDebugEnabled()) {
            m_log.debug("Failed to process static resource: " + Paths.get(getResourceBase()));
        }
    }
}
/**
 * Load a template for the admin page, fill it out and return the value.
 * Placeholders of the form #KEY# are replaced with the supplied values.
 * @param params The key-value set of variables to replace in the template.
 * @return The completed template, or a static error page if expansion fails.
 */
String getHTMLForAdminPage(Map<String,String> params) {
    try {
        String template = m_htmlTemplates.get("admintemplate.html");
        for (Entry<String, String> e : params.entrySet()) {
            // Guard against a null key BEFORE dereferencing it; the previous
            // code called toUpperCase() first, which threw an NPE on null keys.
            if (e.getKey() == null) continue;
            String key = e.getKey().toUpperCase();
            String value = e.getValue();
            if (value == null) value = "NULL";
            template = template.replace("#" + key + "#", value);
        }
        return template;
    }
    catch (Exception e) {
        // Log through the server logger instead of printStackTrace so the
        // failure is visible in the host log.
        m_log.error("Unable to generate admin page from template.", e);
    }
    return "<html><body>An unrecoverable error was encountered while generating this page.</body></html>";
}
/**
 * Reads the named classpath resource (resolved relative to {@code clz}) as
 * UTF-8 text and caches it in {@code m_htmlTemplates} under its own name.
 */
private void loadTemplate(Class<?> clz, String name) throws Exception {
    final URL templateUrl = Resources.getResource(clz, name);
    m_htmlTemplates.put(name, Resources.toString(templateUrl, Charsets.UTF_8));
}
public HTTPAdminListener(
boolean jsonEnabled, String intf, String publicIntf, int port,
SslContextFactory sslContextFactory, boolean mustListen
) throws Exception {
int poolsize = Integer.getInteger("HTTP_POOL_SIZE", 50);
int timeout = Integer.getInteger("HTTP_REQUEST_TIMEOUT_SECONDS", 15);
int cacheMaxAge = Integer.getInteger("HTTP_STATIC_CACHE_MAXAGE", 24*60*60); // 24 hours
String resolvedIntf = intf == null ? "" : intf.trim().isEmpty() ? ""
: HostAndPort.fromHost(intf).withDefaultPort(port).toString();
m_publicIntf = publicIntf == null ? resolvedIntf : publicIntf.trim().isEmpty() ? resolvedIntf
: HostAndPort.fromHost(publicIntf).withDefaultPort(port).toString();
/*
* Don't force us to look at a huge pile of threads
*/
final QueuedThreadPool qtp = new QueuedThreadPool(
poolsize,
1, // minimum threads
timeout * 1000,
new LinkedBlockingQueue<>(poolsize + 16)
);
m_server = new Server(qtp);
m_server.setAttribute(
"org.eclipse.jetty.server.Request.maxFormContentSize",
new Integer(HTTPClientInterface.MAX_QUERY_PARAM_SIZE)
);
// Inactivity timeout defaults to 30 secs, but can be overridden by environment variable
m_sessionHandler.setMaxInactiveInterval(HTTPClientInterface.MAX_SESSION_INACTIVITY_SECONDS);
m_idmanager = new HttpSessionIdManager(m_server);
m_server.setSessionIdManager(m_idmanager);
m_idmanager.setWorkerName("vmc");
m_sessionHandler.setSessionIdManager(m_idmanager);
m_sessionHandler.setServer(m_server);
m_mustListen = mustListen;
// PRE-LOAD ALL HTML TEMPLATES (one for now)
try {
loadTemplate(HTTPAdminListener.class, "admintemplate.html");
}
catch (Exception e) {
VoltLogger logger = new VoltLogger("HOST");
logger.error("Unable to load HTML templates from jar for admin pages.", e);
throw e;
}
// NOW open the connector and create the Jetty server, but don't start it yet.
ServerConnector connector = null;
boolean useSecure = false;
try {
if (sslContextFactory == null) { // basic HTTP
// The socket channel connector seems to be faster for our use
//SelectChannelConnector connector = new SelectChannelConnector();
connector = new ServerConnector(m_server);
if (intf != null && !intf.trim().isEmpty()) {
connector.setHost(intf);
}
connector.setPort(port);
connector.setName("VoltDB-HTTPD");
//open the connector here so we know if port is available and Init work can retry with next port.
connector.open();
m_server.addConnector(connector);
} else { // HTTPS
useSecure = true;
m_server.addConnector(getSSLServerConnector(sslContextFactory, intf, port));
}
ServletContextHandler rootContext = new ServletContextHandler(ServletContextHandler.SESSIONS);
ServletHandler servlets = rootContext.getServletHandler();
// the default is 200k, which is well short of our 2M row size limit
rootContext.setMaxFormContentSize(HTTPClientInterface.MAX_QUERY_PARAM_SIZE);
// close another attack vector where potentially one may send a large number of keys
rootContext.setMaxFormKeys(HTTPClientInterface.MAX_FORM_KEYS);
rootContext.getSessionHandler().getSessionCookieConfig().setHttpOnly(true);
//Only use cookie mode and dont support URL
Set<SessionTrackingMode> trackModes = new HashSet<>();
trackModes.add(SessionTrackingMode.COOKIE);
rootContext.getSessionHandler().setSessionTrackingModes(trackModes);
if (useSecure) {
//Make cookie secure when using SSL
rootContext.getSessionHandler().getSessionCookieConfig().setSecure(useSecure);
}
ContextHandler cssResourceHandler = new ContextHandler("/css");
ResourceHandler cssResource = new CacheStaticResourceHandler(CSS_TARGET, cacheMaxAge);
cssResource.setDirectoriesListed(false);
cssResourceHandler.setHandler(cssResource);
ContextHandler imageResourceHandler = new ContextHandler("/images");
ResourceHandler imagesResource = new CacheStaticResourceHandler(IMAGES_TARGET, cacheMaxAge);
imagesResource.setDirectoriesListed(false);
imageResourceHandler.setHandler(imagesResource);
ContextHandler jsResourceHandler = new ContextHandler("/js");
ResourceHandler jsResource = new CacheStaticResourceHandler(JS_TARGET, cacheMaxAge);
jsResource.setDirectoriesListed(false);
jsResourceHandler.setHandler(jsResource);
//Add all to a collection which will be wrapped by GzipHandler we set GzipHandler to the server.
ContextHandlerCollection handlers = new ContextHandlerCollection();
handlers.addHandler(disableTraceMethodForHandler(rootContext));
handlers.addHandler(disableTraceMethodForHandler(cssResourceHandler));
handlers.addHandler(disableTraceMethodForHandler(imageResourceHandler));
handlers.addHandler(disableTraceMethodForHandler(jsResourceHandler));
GzipHandler compressResourcesHandler = new GzipHandler();
compressResourcesHandler.setHandler(handlers);
compressResourcesHandler.addExcludedMimeTypes(JSON_CONTENT_TYPE);
compressResourcesHandler.setIncludedMimeTypes("application/x-javascript", "text/css" ,
"image/gif", "image/png", "image/jpeg", HTML_CONTENT_TYPE);
compressResourcesHandler.setServer(m_server);
m_server.setHandler(compressResourcesHandler);
//Following are the servlets Jetty is configured with; see each URL pattern for what it handles.
servlets.addServletWithMapping(DBMonitorServlet.class, "/").setAsyncSupported(true); |
package edu.duke.cabig.c3pr.web.registration;
import java.util.ArrayList;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.springframework.validation.BindException;
import org.springframework.validation.Errors;
import org.springframework.web.servlet.ModelAndView;
import org.springframework.web.util.WebUtils;
import edu.duke.cabig.c3pr.domain.Epoch;
import edu.duke.cabig.c3pr.domain.Identifier;
import edu.duke.cabig.c3pr.domain.ScheduledEpoch;
import edu.duke.cabig.c3pr.domain.StudySubject;
import edu.duke.cabig.c3pr.utils.StringUtils;
import edu.duke.cabig.c3pr.utils.web.ControllerTools;
import edu.duke.cabig.c3pr.web.registration.tabs.ManageCompanionRegistrationTab;
import edu.duke.cabig.c3pr.web.registration.tabs.RegistrationOverviewTab;
import edu.duke.cabig.c3pr.xml.XmlMarshaller;
import gov.nih.nci.cabig.ctms.web.tabs.Flow;
import gov.nih.nci.cabig.ctms.web.tabs.Tab;
/**
 * Controller for the "Manage Registration" flow. Supports exporting a
 * registration as XML and, on finish, scheduling the subject into a newly
 * selected epoch and dispatching the matching repository operation
 * (transfer / enroll / register / reserve / save).
 */
public class ManageRegistrationController<C extends StudySubjectWrapper> extends RegistrationController<C> {

    // Marshals StudySubject objects to XML for the export action.
    private XmlMarshaller xmlUtility;

    public XmlMarshaller getXmlUtility() {
        return xmlUtility;
    }

    public void setXmlUtility(XmlMarshaller xmlUtility) {
        this.xmlUtility = xmlUtility;
    }

    public ManageRegistrationController() {
        super("Manage Registration");
    }

    /** This flow only shows the registration overview tab. */
    @Override
    protected void intializeFlows(Flow flow) {
        flow.addTab(new RegistrationOverviewTab<StudySubjectWrapper>());
        setFlow(flow);
    }

    /**
     * Intercepts "_action=export" requests and streams the current study
     * subject as an XML attachment; every other request falls through to the
     * normal tabbed-flow handling in the superclass.
     */
    @Override
    protected ModelAndView handleRequestInternal(HttpServletRequest request,
            HttpServletResponse response) throws Exception {
        if (request.getParameterMap().keySet().contains("_action")
                && StringUtils.getBlankIfNull(request.getParameter("_action"))
                        .equalsIgnoreCase("export")) {
            response.reset();
            StudySubjectWrapper wrapper= (StudySubjectWrapper) currentFormObject(request,request.getSession().getAttribute(getFormSessionAttributeName()));
            StudySubject studySubject = wrapper.getStudySubject();
            response.setContentType("application/xml");
            String fileName = "registration-"+ studySubject.getId() + ".xml" ;
            response.setHeader("Content-Disposition", "attachment; filename="+fileName);
            xmlUtility.toXML(studySubject, response.getWriter());
            response.getWriter().close();
            // null tells Spring MVC the response has already been written.
            return null;
        }
        return super.handleRequestInternal(request, response);
    }

    /**
     * On finish, transfers the subject into the epoch selected via the
     * "epoch" request parameter, when that epoch needs no additional input.
     * Returns a redirect to the confirmation page, or null when no epoch
     * parameter was submitted.
     */
    @Override
    protected ModelAndView processFinish(HttpServletRequest request, HttpServletResponse response, Object command, BindException errors) throws Exception {
        // this condition is added to transfer epoch, if the epoch doesn't require additional input from the user.
        if(WebUtils.hasSubmitParameter(request, "epoch")){
            StudySubjectWrapper wrapper= (StudySubjectWrapper)command ;
            StudySubject studySubject = wrapper.getStudySubject();
            ScheduledEpoch scheduledEpoch;
            Integer id = Integer.parseInt(request.getParameter("epoch"));
            Epoch epoch = epochDao.getById(id);
            epochDao.initialize(epoch);
            if (epoch.getTreatmentIndicator()) {
                // Touch the arms collection to force lazy initialization while
                // the persistence session is still open.
                (epoch).getArms().size();
                scheduledEpoch = new ScheduledEpoch();
            }
            else {
                // NOTE(review): both branches create a plain ScheduledEpoch; the
                // only difference is the lazy-init above — confirm this is intended.
                scheduledEpoch = new ScheduledEpoch();
            }
            scheduledEpoch.setEpoch(epoch);
            scheduledEpoch.setEligibilityIndicator(registrationControllerUtils.evaluateEligibilityIndicator(studySubject));
            scheduledEpoch.setScEpochDataEntryStatus(scheduledEpoch.evaluateScheduledEpochDataEntryStatus(scheduledEpoch.getStratumGroupNumber()));
            studySubject.addScheduledEpoch(scheduledEpoch);
            registrationControllerUtils.buildCommandObject(studySubject);
            studySubjectDao.initialize(studySubject);
            // Dispatch to the repository operation matching the requested action;
            // plain save is the fallback.
            if(wrapper.getShouldTransfer())
                studySubject = studySubjectRepository.transferSubject(studySubject);
            else if(wrapper.getShouldEnroll()){
                studySubject=studySubjectRepository.enroll(studySubject);
            }else if(wrapper.getShouldRegister()){
                studySubject=studySubjectRepository.register(studySubject.getIdentifiers());
            }else if(wrapper.getShouldReserve()){
                studySubject=studySubjectRepository.reserve(studySubject.getIdentifiers());
            }else{
                studySubject=studySubjectRepository.save(studySubject);
            }
            return new ModelAndView("redirect:confirm?" + ControllerTools.createParameterString(studySubject.getSystemAssignedIdentifiers().get(0)));
        }
        return null;
    }

    /** Merges the command's subject, re-initializes it, and stores it back on the command. */
    @Override
    protected C save(C command, Errors arg1) {
        StudySubject merged = (StudySubject) getDao().merge(getPrimaryDomainObject(command));
        studySubjectDao.initialize(merged);
        command.setStudySubject(merged);
        return command;
    }

    /** Pages in this flow never auto-save on navigation. */
    @Override
    protected boolean isNextPageSavable(HttpServletRequest request, C command, Tab<C> tab) {
        return false;
    }

//    @Override
//    protected void postProcessPage(HttpServletRequest request, Object command,Errors errors, int page) throws Exception {
//        super.postProcessPage(request, command, errors, page);
//        StudySubjectWrapper wrapper = (StudySubjectWrapper)command ;
//        Identifier identifier=ControllerTools.getIdentifierInRequest(request);
//        if(identifier != null){
//            List<Identifier> identifiers=new ArrayList<Identifier>();
//            identifiers.add(identifier);
//            StudySubject studySubject=studySubjectRepository.getUniqueStudySubjects(identifiers);
//            studySubjectDao.initialize(studySubject);
//            wrapper.setStudySubject(studySubject);
}
/*
* (e-mail:zhongxunking@163.com)
*/
/*
 * Registrar for configuration-change listeners.
 * @author zhongxunking 2017-09-13 15:17
 */
package org.antframework.configcenter.client.support;
import org.antframework.configcenter.client.ConfigListener;
import org.antframework.configcenter.client.core.ChangedProperty;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
public class ListenerRegistrar {
private List<ConfigListener> listeners = new CopyOnWriteArrayList<>();
public void register(ConfigListener listener) {
listeners.add(listener);
}
public void unregister(ConfigListener listener) {
listeners.remove(listener);
}
void onChange(List<ChangedProperty> changedProperties) {
if (changedProperties == null || changedProperties.size() <= 0) {
return;
}
for (ConfigListener listener : listeners) {
listener.onChange(changedProperties);
}
}
} |
package org.jboss.as.core.model.test.util;
import java.util.ArrayList;
import java.util.List;
import org.jboss.as.controller.ModelVersion;
import org.jboss.as.model.test.EAPRepositoryReachableUtil;
import org.jboss.as.model.test.ModelTestControllerVersion;
/**
*
* @author <a href="kabir.khan@jboss.com">Kabir Khan</a>
*/
public class TransformersTestParameters {
private final ModelVersion modelVersion;
private final ModelTestControllerVersion testControllerVersion;
public TransformersTestParameters(ModelVersion modelVersion, ModelTestControllerVersion testControllerVersion) {
this.modelVersion = modelVersion;
this.testControllerVersion = testControllerVersion;
}
protected TransformersTestParameters(TransformersTestParameters delegate) {
this(delegate.getModelVersion(), delegate.getTestControllerVersion());
}
public ModelVersion getModelVersion() {
return modelVersion;
}
public ModelTestControllerVersion getTestControllerVersion() {
return testControllerVersion;
}
public static List<Object[]> setupVersions(){
List<Object[]> data = new ArrayList<Object[]>();
//AS releases
data.add(new Object[] {new TransformersTestParameters(ModelVersion.create(1, 2, 0), ModelTestControllerVersion.V7_1_2_FINAL)});
data.add(new Object[] {new TransformersTestParameters(ModelVersion.create(1, 3, 0), ModelTestControllerVersion.V7_1_3_FINAL)});
data.add(new Object[] {new TransformersTestParameters(ModelVersion.create(1, 4, 0), ModelTestControllerVersion.V7_2_0_FINAL)});
data.add(new Object[] {new TransformersTestParameters(ModelVersion.create(2, 0, 0), ModelTestControllerVersion.MASTER)});
//EAP releases
if (EAPRepositoryReachableUtil.isReachable()) {
data.add(new Object[] {new TransformersTestParameters(ModelVersion.create(1, 2, 0), ModelTestControllerVersion.EAP_6_0_0)});
data.add(new Object[] {new TransformersTestParameters(ModelVersion.create(1, 3, 0), ModelTestControllerVersion.EAP_6_0_1)});
data.add(new Object[] {new TransformersTestParameters(ModelVersion.create(1, 4, 0), ModelTestControllerVersion.EAP_6_1_0)});
data.add(new Object[] {new TransformersTestParameters(ModelVersion.create(1, 4, 0), ModelTestControllerVersion.EAP_6_1_1)});
}
for (int i = 0 ; i < data.size() ; i++) {
Object[] entry = data.get(i);
System.out.println("Parameter " + i + ": " + entry[0]);
}
return data;
}
@Override
public String toString() {
return "TransformersTestParameters={modelVersion=" + modelVersion + "; testControllerVersion=" + testControllerVersion + "}";
}
} |
package org.eclipse.birt.core.internal.function.impl;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URL;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.eclipse.birt.core.data.DataTypeUtil;
import org.eclipse.birt.core.exception.BirtException;
import org.eclipse.birt.core.framework.IConfigurationElement;
import org.eclipse.birt.core.framework.IExtension;
import org.eclipse.birt.core.framework.IExtensionPoint;
import org.eclipse.birt.core.framework.IExtensionRegistry;
import org.eclipse.birt.core.framework.Platform;
import org.eclipse.birt.core.script.functionservice.IScriptFunction;
import org.eclipse.birt.core.script.functionservice.IScriptFunctionArgument;
import org.eclipse.birt.core.script.functionservice.IScriptFunctionCategory;
import org.eclipse.birt.core.script.functionservice.IScriptFunctionFactory;
import org.eclipse.birt.core.script.functionservice.impl.Argument;
import org.eclipse.birt.core.script.functionservice.impl.Category;
import org.eclipse.birt.core.script.functionservice.impl.CategoryWrapper;
import org.eclipse.birt.core.script.functionservice.impl.IFunctionProvider;
import org.eclipse.birt.core.script.functionservice.impl.ScriptFunction;
import org.mozilla.javascript.Context;
import org.mozilla.javascript.Script;
import org.mozilla.javascript.Scriptable;
import org.mozilla.javascript.ScriptableObject;
import org.osgi.framework.Bundle;
/**
 * Discovers BIRT script functions contributed through the
 * "org.eclipse.birt.core.ScriptFunctionService" extension point and exposes
 * them, grouped by category, to the Rhino scripting environment.
 */
public class FunctionProviderImpl implements IFunctionProvider
{
	//The extension point plus the element/attribute names used by contributions
	private static final String EXTENSION_POINT = "org.eclipse.birt.core.ScriptFunctionService";
	private static final String ELEMENT_CATEGORY = "Category";
	private static final String ELEMENT_FUNCTION = "Function";
	private static final String ELEMENT_ARGUMENT = "Argument";
	private static final String ELEMENT_JSLIB = "JSLib";
	private static final String ELEMENT_DATATYPE = "DataType";
	private static final String ATTRIBUTE_NAME = "name";
	private static final String ATTRIBUTE_DESC = "desc";
	private static final String ATTRIBUTE_FACTORYCLASS = "factoryclass";
	private static final String ATTRIBUTE_VALUE = "value";
	private static final String ATTRIBUTE_ISOPTIONAL = "isOptional";
	private static final String ATTRIBUTE_ALLOWVARARGUMENT = "variableArguments";
	private static final String ATTRIBUTE_ISSTATIC="isStatic";
	private static final String ATTRIBUTE_ISCONSTRUCTOR="isConstructor";
	private static final String ATTRIBUTE_LOCATION = "location";
	private static final String ATTRIBUTE_ISVISIBLE="isVisible";
	// null key: bucket for functions contributed without a Category element
	private static final String DEFAULT_CATEGORYNAME = null;

	// Lazily populated by getCategoryMap(); access only through that method.
	private Map<String, Category> categories;
	// URLs of contributed .js libraries, executed in registerScriptFunction().
	private List<URL> jsLibs = new ArrayList<URL>( );

	/**
	 * Return all the categories defined by extensions.
	 *
	 * @return all known function categories (including the default bucket)
	 * @throws BirtException
	 */
	public IScriptFunctionCategory[] getCategories( )
			throws BirtException
	{
		return getCategoryMap( ).values( )
				.toArray( new IScriptFunctionCategory[]{} );
	}

	/**
	 * Return the functions that defined in a category.
	 *
	 * @param categoryName name of the category; unknown names yield an empty array
	 * @return
	 * @throws BirtException
	 */
	public IScriptFunction[] getFunctions( String categoryName )
			throws BirtException
	{
		if ( getCategoryMap( ).containsKey( categoryName ) )
		{
			Category category = getCategoryMap( ).get( categoryName );
			return category.getFunctions( );
		}
		return new IScriptFunction[0];
	}

	/**
	 * Register script functions to scope: each visible category is published
	 * as a host object, then every contributed .js library is compiled and
	 * executed in the scope.
	 *
	 * @param cx
	 * @param scope
	 * @throws BirtException
	 */
	public void registerScriptFunction( Context cx, Scriptable scope )
			throws BirtException
	{
		List<CategoryWrapper> wrapperedCategories = getWrapperedCategories( );
		for ( CategoryWrapper category : wrapperedCategories )
		{
			ScriptableObject.putProperty( scope,
					category.getClassName( ),
					category );
		}
		for ( URL url : jsLibs )
		{
			Script script;
			try
			{
				script = cx.compileReader( new BufferedReader( new InputStreamReader( url.openStream( ) ) ),
						null,
						0,
						null );
				script.exec( cx, scope );
			}
			catch ( IOException e )
			{
				// NOTE(review): a .js library that fails to load is silently
				// skipped — consider logging so missing libs are diagnosable.
			}
		}
	}

	/**
	 * Return the category map, building it on first use from the extension
	 * registry. Synchronized so concurrent callers build the map only once.
	 *
	 * @return map of category name (null key = default bucket) to Category
	 */
	private synchronized Map<String, Category> getCategoryMap( )
	{
		if ( categories != null )
			return categories;
		categories = new HashMap<String, Category>( );
		//Find the extension point.
		IExtensionRegistry extReg = Platform.getExtensionRegistry( );
		IExtensionPoint extPoint = extReg.getExtensionPoint( EXTENSION_POINT );
		if ( extPoint == null )
			return categories;
		//Fetch all extensions
		IExtension[] exts = extPoint.getExtensions( );
		if ( exts == null )
		{
			return categories;
		}
		//populate category map as per extension.
		for ( int e = 0; e < exts.length; e++ )
		{
			try
			{
				IConfigurationElement[] configElems = exts[e].getConfigurationElements( );
				if ( configElems == null )
					continue;
				for ( int i = 0; i < configElems.length; i++ )
				{
					// visibility defaults to true when the attribute is absent
					boolean isVisible = extractBoolean( configElems[i].getAttribute( ATTRIBUTE_ISVISIBLE ),
							true );
					// for element Category
					if ( configElems[i].getName( ).equals( ELEMENT_CATEGORY ) )
					{
						Category category = new Category( configElems[i].getAttribute( ATTRIBUTE_NAME ),
								configElems[i].getAttribute( ATTRIBUTE_DESC ),
								isVisible );
						categories.put( category.getName( ), category );
						IScriptFunctionFactory factory = null;
						if ( configElems[i].getAttribute( ATTRIBUTE_FACTORYCLASS ) != null )
							factory = (IScriptFunctionFactory) configElems[i].createExecutableExtension( ATTRIBUTE_FACTORYCLASS );
						IConfigurationElement[] functions = configElems[i].getChildren( ELEMENT_FUNCTION );
						for ( int j = 0; j < functions.length; j++ )
						{
							IScriptFunction function = getScriptFunction( category,
									factory,
									functions[j] );
							if ( function != null )
								category.addFunction( function );
						}
					}
					// For element function that are not under certain category.
					// Usually those functions are
					// defined in .js file
					else if ( configElems[i].getName( )
							.equals( ELEMENT_FUNCTION ) )
					{
						if ( categories.get( DEFAULT_CATEGORYNAME ) == null )
						{
							categories.put( DEFAULT_CATEGORYNAME,
									new Category( DEFAULT_CATEGORYNAME,
											null,
											isVisible ) );
						}
						IScriptFunction function = getScriptFunction( categories.get( DEFAULT_CATEGORYNAME ),
								null,
								configElems[i] );
						if ( function != null )
							categories.get( DEFAULT_CATEGORYNAME )
									.addFunction( function );
					}
					// Populate the .js script library
					else if ( configElems[i].getName( ).equals( ELEMENT_JSLIB ) )
					{
						populateResources( jsLibs, ".js", configElems[i] );
					}
				}
			}
			catch ( BirtException ex )
			{
				// NOTE(review): a failing extension is reported to stderr and
				// skipped, so one bad contribution does not block the rest.
				ex.printStackTrace( );
			}
		}
		return categories;
	}

	/**
	 * Populate library resources. The library resources include the .js script
	 * lib and the .jar java lib. The contributed location may denote either a
	 * directory (all matching entries are added) or a single file.
	 * @param libs
	 * @param suffix file suffix to match, e.g. ".js"
	 * @param confElement
	 */
	private static void populateResources( List<URL> libs, String suffix,
			IConfigurationElement confElement )
	{
		String source = confElement.getAttribute( ATTRIBUTE_LOCATION );
		IExtension extension = confElement.getDeclaringExtension( );
		String namespace = extension.getNamespace( );
		Bundle bundle = org.eclipse.core.runtime.Platform.getBundle( namespace );
		if ( bundle != null )
		{
			Enumeration<String> files = bundle.getEntryPaths( source );
			if ( files != null )
			{
				// In this case, the bundle denotes to a directory.
				while ( files.hasMoreElements( ) )
				{
					String filePath = files.nextElement( );
					if ( filePath.toLowerCase( ).endsWith( suffix ) )
					{
						URL url = bundle.getEntry( filePath );
						if ( url != null )
						{
							libs.add( url );
						}
					}
				}
			}
			else
			{
				// the bundle denotes to a file.
				if ( source.toLowerCase( ).endsWith( suffix ) )
				{
					URL url = bundle.getEntry( source );
					if ( url != null )
					{
						libs.add( url );
					}
				}
			}
		}
	}

	/**
	 * Create script function out of a function element.
	 * @param category category to attach the function to
	 * @param factory optional factory supplying the function executor; may be null
	 * @param function
	 * @return the function, or null when the element is malformed
	 */
	private static IScriptFunction getScriptFunction( Category category,
			IScriptFunctionFactory factory, IConfigurationElement function )
	{
		try
		{
			//Function name
			String name = function.getAttribute( ATTRIBUTE_NAME );
			//Function Desc
			String desc = function.getAttribute( ATTRIBUTE_DESC );
			//Allow var argument
			String varArgs = function.getAttribute( ATTRIBUTE_ALLOWVARARGUMENT );
			boolean allowVarArgs = extractBoolean( varArgs, false );
			boolean isConstructor = extractBoolean( function.getAttribute( ATTRIBUTE_ISCONSTRUCTOR ), false);
			boolean isStatic = extractBoolean( function.getAttribute( ATTRIBUTE_ISSTATIC ), true);
			boolean isVisible = extractBoolean( function.getAttribute( ATTRIBUTE_ISVISIBLE ), true);
			String dataType = null;
			List<IScriptFunctionArgument> arguments = new ArrayList<IScriptFunctionArgument>( );
			//Populate function return data type info.
			if ( hasChildren( ELEMENT_DATATYPE, function ) )
			{
				dataType = function.getChildren( ELEMENT_DATATYPE )[0].getAttribute( ATTRIBUTE_VALUE );
			}
			//Populate function argument info
			if ( hasChildren( ELEMENT_ARGUMENT, function ) )
			{
				for ( int i = 0; i < function.getChildren( ELEMENT_ARGUMENT ).length; i++ )
				{
					arguments.add( getScriptFunctionArgument( function.getChildren( ELEMENT_ARGUMENT )[i] ) );
				}
			}
			return new ScriptFunction( name,
					category,
					arguments.toArray( new IScriptFunctionArgument[0] ),
					dataType,
					desc,
					factory == null ? null : factory.getFunctionExecutor( name ),
					allowVarArgs,
					isStatic,
					isConstructor,
					isVisible );
		}
		catch ( Exception e )
		{
			// NOTE(review): malformed contributions are dropped silently
			// (null return); consider logging the cause.
			return null;
		}
	}

	/**
	 * Parse a boolean attribute value, falling back to {@code ifNull} when
	 * the attribute is absent.
	 */
	private static boolean extractBoolean( String strValue, boolean ifNull )
			throws BirtException
	{
		boolean booleanValue = strValue == null ? ifNull
				: DataTypeUtil.toBoolean( strValue );
		return booleanValue;
	}

	/**
	 * Populate function argument.
	 * @param argument
	 * @return
	 * @throws BirtException
	 */
	private static IScriptFunctionArgument getScriptFunctionArgument(
			IConfigurationElement argument ) throws BirtException
	{
		String name = argument.getAttribute( ATTRIBUTE_NAME );
		String desc = argument.getAttribute( ATTRIBUTE_DESC );
		//populate whether it is optional argument.
		String optional = argument.getAttribute( ATTRIBUTE_ISOPTIONAL );
		boolean isOptional = extractBoolean( optional, false );
		String dataType = null;
		//Populate data type
		if ( hasChildren( ELEMENT_DATATYPE, argument ) )
		{
			dataType = argument.getChildren( ELEMENT_DATATYPE )[0].getAttribute( ATTRIBUTE_VALUE );
		}
		return new Argument( name,
				dataType,
				desc,
				isOptional );
	}

	/**
	 * Whether the element has at least one child with the given name.
	 * @param name
	 * @param element
	 * @return
	 */
	private static boolean hasChildren( String name,
			IConfigurationElement element )
	{
		IConfigurationElement[] children = element.getChildren( name );
		return children != null && children.length > 0;
	}

	/**
	 * Create category wrapper for every named category (the default null-named
	 * bucket is excluded).
	 *
	 * @return
	 * @throws BirtException
	 */
	private List<CategoryWrapper> getWrapperedCategories( )
			throws BirtException
	{
		List<CategoryWrapper> result = new ArrayList<CategoryWrapper>( );
		for ( Category category : getCategoryMap( ).values( ) )
		{
			// Reference comparison is safe here only because
			// DEFAULT_CATEGORYNAME is null; this skips the default bucket.
			if ( category.getName( ) != DEFAULT_CATEGORYNAME )
				result.add( new CategoryWrapper( category ) );
		}
		return result;
	}
}
package org.zkoss.ganttz;
import static org.zkoss.ganttz.i18n.I18nHelper._;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.zkoss.ganttz.adapters.IDisabilityConfiguration;
import org.zkoss.ganttz.data.Dependency;
import org.zkoss.ganttz.data.DependencyType;
import org.zkoss.ganttz.data.Position;
import org.zkoss.ganttz.data.Task;
import org.zkoss.ganttz.data.TaskContainer;
import org.zkoss.ganttz.timetracker.TimeTracker;
import org.zkoss.ganttz.timetracker.TimeTrackerComponent;
import org.zkoss.ganttz.timetracker.zoom.IZoomLevelChangedListener;
import org.zkoss.ganttz.timetracker.zoom.ZoomLevel;
import org.zkoss.ganttz.util.MenuBuilder;
import org.zkoss.ganttz.util.MenuBuilder.ItemAction;
import org.zkoss.zk.au.out.AuInvoke;
import org.zkoss.zk.ui.event.Event;
import org.zkoss.zk.ui.event.EventListener;
import org.zkoss.zk.ui.ext.AfterCompose;
import org.zkoss.zul.Menupopup;
import org.zkoss.zul.impl.XulElement;
/**
 * Component that shows the list of tasks in the planner.
 * @author Javier Moran Rua <jmoran@igalia.com>
 */
public class TaskList extends XulElement implements AfterCompose {

    /** Pixel height allotted to each task row; getHeight() multiplies by row count. */
    private static final int HEIGHT_PER_ROW = 20;

    // Created lazily in afterCompose(); transient so it is not serialized.
    private transient IZoomLevelChangedListener zoomLevelChangedListener;

    // Tasks supplied at construction time; rendered into rows by afterCompose().
    private List<Task> originalTasks;

    // Command run when a task component is double-clicked; may be null.
    private final CommandOnTaskContextualized<?> doubleClickCommand;

    private final List<? extends CommandOnTaskContextualized<?>> commandsOnTasksContextualized;

    private final FunctionalityExposedForExtensions<?> context;

    private final IDisabilityConfiguration disabilityConfiguration;

    private FilterAndParentExpandedPredicates predicate;

    // Tasks currently shown; populated as components are created.
    private List<Task> visibleTasks = new ArrayList<Task>();

    // Shared lookup from domain Task to its rendering component.
    private Map<Task, TaskComponent> taskComponentByTask;
    /**
     * Creates the list for the given tasks. Components are not built here;
     * rendering happens later in afterCompose().
     */
    public TaskList(
            FunctionalityExposedForExtensions<?> context,
            CommandOnTaskContextualized<?> doubleClickCommand,
            List<Task> tasks,
            List<? extends CommandOnTaskContextualized<?>> commandsOnTasksContextualized,
            IDisabilityConfiguration disabilityConfiguration,
            FilterAndParentExpandedPredicates predicate) {
        this.context = context;
        this.doubleClickCommand = doubleClickCommand;
        this.originalTasks = tasks;
        this.commandsOnTasksContextualized = commandsOnTasksContextualized;
        this.disabilityConfiguration = disabilityConfiguration;
        this.predicate = predicate;
    }
public static TaskList createFor(
FunctionalityExposedForExtensions<?> context,
CommandOnTaskContextualized<?> doubleClickCommand,
List<? extends CommandOnTaskContextualized<?>> commandsOnTasksContextualized,
IDisabilityConfiguration disabilityConfiguration,
FilterAndParentExpandedPredicates predicate) {
TaskList result = new TaskList(context, doubleClickCommand, context
.getDiagramGraph().getTopLevelTasks(),
commandsOnTasksContextualized, disabilityConfiguration,
predicate);
return result;
}
public List<DependencyComponent> asDependencyComponents(
Collection<? extends Dependency> dependencies) {
List<DependencyComponent> result = new ArrayList<DependencyComponent>();
for (Dependency dependency : dependencies) {
result.add(new DependencyComponent(taskComponentByTask
.get(dependency.getSource()), taskComponentByTask
.get(dependency.getDestination()), dependency));
}
return result;
}
public DependencyComponent asDependencyComponent(Dependency dependency) {
return asDependencyComponents(Arrays.asList(dependency)).get(0);
}
    /**
     * Inserts the component's row before the given row (a null beforeThis
     * appends at the end) and wires up its context menu and double-click
     * listener. When relocate is true, heights and dependency arrows are
     * refreshed afterwards.
     */
    public synchronized void addTaskComponent(TaskRow beforeThis,
            final TaskComponent taskComponent, boolean relocate) {
        insertBefore(taskComponent.getRow(), beforeThis);
        addContextMenu(taskComponent);
        addListenerForTaskComponentEditForm(taskComponent);
        taskComponent.afterCompose();
        if (relocate) {
            setHeight(getHeight());// forcing smart update
            adjustZoomColumnsHeight();
            getGanttPanel().getDependencyList().redrawDependencies();
        }
    }
    /** Appends the task component at the end of the list. */
    public synchronized void addTaskComponent(
            final TaskComponent taskComponent, boolean relocate) {
        addTaskComponent(null, taskComponent, relocate);
    }
    /**
     * Adds new tasks at the given position: appended to the top level,
     * inserted at a specific top-level index, or delegated to the container
     * component of the position's most remote ancestor.
     */
    public void addTasks(Position position, Collection<? extends Task> newTasks) {
        if (position.isAppendToTop()) {
            // Append each task at the end of the top level.
            for (Task t : newTasks) {
                TaskComponent taskComponent = TaskComponent.asTaskComponent(t,
                        this);
                addTaskComponent(taskComponent, true);
                taskComponent.publishTaskComponents(taskComponentByTask);
            }
        } else if (position.isAtTop()) {
            // Insert at a specific top-level index; a null beforeThis appends.
            final int insertionPosition = position.getInsertionPosition();
            List<TaskComponent> topTaskComponents = getTopLevelTaskComponents();
            TaskRow beforeThis = insertionPosition < topTaskComponents.size() ? topTaskComponents
                    .get(insertionPosition).getRow()
                    : null;
            for (Task t : newTasks) {
                TaskComponent toAdd = TaskComponent.asTaskComponent(t, this);
                addTaskComponent(beforeThis, toAdd, true);
                toAdd.publishTaskComponents(taskComponentByTask);
                // Keep inserting after the component just added.
                beforeThis = (TaskRow) toAdd.getRow().getNextSibling();
            }
        } else {
            // Nested position: hand the insertion over to the container.
            Task mostRemoteAncestor = position.getMostRemoteAncestor();
            TaskComponent taskComponent = find(mostRemoteAncestor);
            if (taskComponent instanceof TaskContainerComponent) {
                TaskContainerComponent container = (TaskContainerComponent) taskComponent;
                container.insert(position, newTasks);
            } else {
                // TODO turn taskComponent into container
            }
        }
    }
TaskComponent find(Task task) {
List<TaskComponent> taskComponents = getTaskComponents();
for (TaskComponent taskComponent : taskComponents) {
if (taskComponent.getTask().equals(task)) {
return taskComponent;
}
}
return null;
}
    /**
     * Hooks the double-click command (if one was configured) to the
     * component's onDoubleClick event; a no-op when there is no command.
     */
    private void addListenerForTaskComponentEditForm(
            final TaskComponent taskComponent) {
        if (doubleClickCommand == null) {
            return;
        }
        taskComponent.addEventListener("onDoubleClick", new EventListener() {
            @Override
            public void onEvent(Event event) throws Exception {
                doubleClickCommand.doAction(taskComponent);
            }
        });
    }
/**
 * Opens the (lazily built, cached) context menu when the component is
 * right-clicked.
 */
private void addContextMenu(final TaskComponent taskComponent) {
    taskComponent.addEventListener("onRightClick", new EventListener() {
        @Override
        public void onEvent(Event event) throws Exception {
            try {
                getContextMenuFor(taskComponent).open(taskComponent);
            } catch (Exception e) {
                // NOTE(review): failure is only dumped to stderr; consider
                // routing through the application logger instead
                e.printStackTrace();
            }
        }
    });
}
/**
 * The list height grows linearly with the number of visible task rows.
 */
@Override
public String getHeight() {
    final int rows = getTasksNumber();
    return rows * HEIGHT_PER_ROW + "px";
}
/** @return the time tracker component owned by the enclosing gantt panel */
private TimeTrackerComponent getTimeTrackerComponent() {
    final GanttPanel panel = getGanttPanel();
    return panel.getTimeTrackerComponent();
}
/** @return the mapper used to translate dates into positions */
IDatesMapper getMapper() {
    final TimeTracker tracker = getTimeTracker();
    return tracker.getMapper();
}
/** @return the time tracker behind the time tracker component */
private TimeTracker getTimeTracker() {
    final TimeTrackerComponent component = getTimeTrackerComponent();
    return component.getTimeTracker();
}
/**
 * Filters the visible components down to the top level ones.
 *
 * @return a fresh list with only the top level task components
 */
private List<TaskComponent> getTopLevelTaskComponents() {
    final List<TaskComponent> topLevel = new ArrayList<TaskComponent>();
    for (TaskComponent candidate : getTaskComponents()) {
        if (candidate.isTopLevel()) {
            topLevel.add(candidate);
        }
    }
    return topLevel;
}
/**
 * Collects the task components hosted by this list's child rows.
 * Children that are not {@link TaskRow} instances are skipped.
 *
 * @return a fresh list of the hosted task components
 */
private List<TaskComponent> getTaskComponents() {
    final List<TaskComponent> components = new ArrayList<TaskComponent>();
    for (Object child : getChildren()) {
        if (!(child instanceof TaskRow)) {
            continue;
        }
        components.add(((TaskRow) child).getChild());
    }
    return components;
}
/** @return how many task components are currently shown */
public int getTasksNumber() {
    final List<TaskComponent> components = getTaskComponents();
    return components.size();
}
/**
 * Builds the initial components for all original tasks, registers the zoom
 * listener (once) and publishes the task-to-component map before the first
 * reload. The registration order matters: components must exist before
 * reload(false) re-evaluates visibility.
 */
@Override
public void afterCompose() {
    List<TaskComponent> taskComponents = new ArrayList<TaskComponent>();
    // create a component per original task; all start out visible
    for (Task task : originalTasks) {
        TaskComponent taskComponent = TaskComponent.asTaskComponent(task,
                this);
        addTaskComponent(taskComponent, false);
        taskComponents.add(taskComponent);
        visibleTasks.add(task);
    }
    // register the zoom listener only once per TaskList instance
    if (zoomLevelChangedListener == null) {
        zoomLevelChangedListener = new IZoomLevelChangedListener() {
            @Override
            public void zoomLevelChanged(ZoomLevel detailLevel) {
                for (TaskComponent taskComponent : getTaskComponents()) {
                    taskComponent.zoomChanged();
                }
                adjustZoomColumnsHeight();
            }
        };
        getTimeTracker().addZoomListener(zoomLevelChangedListener);
    }
    // (re)build the lookup map used by dependency drawing and reloads
    taskComponentByTask = new HashMap<Task, TaskComponent>();
    for (TaskComponent taskComponent : taskComponents) {
        taskComponent.publishTaskComponents(taskComponentByTask);
    }
    reload(false);
}
// cache of lazily built context menus, one per task component
private Map<TaskComponent, Menupopup> contextMenus = new HashMap<TaskComponent, Menupopup>();

/**
 * Returns the context menu for the given component, building and caching it
 * on first request. The menu contains an "Add Dependency" item (when
 * enabled) plus every contextual command accepting the component.
 */
private Menupopup getContextMenuFor(TaskComponent taskComponent) {
    // single map lookup instead of the original get/put/get sequence
    Menupopup menu = contextMenus.get(taskComponent);
    if (menu == null) {
        MenuBuilder<TaskComponent> menuBuilder = MenuBuilder.on(getPage(),
                getTaskComponents());
        if (disabilityConfiguration.isAddingDependenciesEnabled()) {
            menuBuilder.item(_("Add Dependency"),
                    "/common/img/ico_dependency.png",
                    new ItemAction<TaskComponent>() {
                        @Override
                        public void onEvent(TaskComponent choosen,
                                Event event) {
                            choosen.addDependency();
                        }
                    });
        }
        for (CommandOnTaskContextualized<?> command : commandsOnTasksContextualized) {
            if (command.accepts(taskComponent)) {
                menuBuilder.item(command.getName(), command.getIcon(),
                        command.toItemAction());
            }
        }
        menu = menuBuilder.createWithoutSettingContext();
        contextMenus.put(taskComponent, menu);
    }
    return menu;
}
/** The task list is always placed directly inside a {@link GanttPanel}. */
GanttPanel getGanttPanel() {
    final Object parent = getParent();
    return (GanttPanel) parent;
}
/**
 * Asks the client side to recompute the zoom columns height by sending an
 * "adjust_height" AU response targeting this TaskList.
 */
public void adjustZoomColumnsHeight() {
    response("adjust_height", new AuInvoke(TaskList.this, "adjust_height"));
}
/**
 * Hides a task by detaching the whole row hosting its component.
 */
public void hideTaskComponent(TaskComponent subtaskComponent) {
    final TaskRow row = subtaskComponent.getRow();
    removeChild(row);
}
/** Forces the dependency arrows of the enclosing panel to be redrawn. */
public void redrawDependencies() {
    final GanttPanel panel = getGanttPanel();
    panel.getDependencyList().redrawDependencies();
}
/**
 * Removes the component showing the given task, if any is visible.
 * Delegates to {@link #find(Task)} instead of duplicating the same linear
 * scan, keeping lookup semantics in one place (first match wins).
 */
public void remove(Task task) {
    TaskComponent taskComponent = find(task);
    if (taskComponent != null) {
        taskComponent.remove();
    }
}
/**
 * Registers an END_START dependency from <code>source</code> to
 * <code>destination</code> in the gantt context.
 */
public void addDependency(TaskComponent source, TaskComponent destination) {
    final Dependency dependency = new Dependency(source.getTask(),
            destination.getTask(), DependencyType.END_START);
    context.addDependency(dependency);
}
/** @return the configuration telling which editing features are enabled */
public IDisabilityConfiguration getDisabilityConfiguration() {
    return this.disabilityConfiguration;
}
/**
 * Re-evaluates the visibility of the whole task tree against the current
 * predicate and redraws the dependency arrows afterwards.
 *
 * @param relocate whether added components must force a relayout
 */
public void reload(boolean relocate) {
    final List<Task> pendingToAdd = new ArrayList<Task>();
    reload(originalTasks, pendingToAdd, relocate);
    // flush whatever is still pending at the very end of the list
    addPendingTasks(pendingToAdd, null, relocate);
    getGanttPanel().getDependencyList().redrawDependencies();
}
/**
 * Walks the task tree depth-first, showing tasks accepted by the predicate
 * and hiding rejected ones. Tasks to show are accumulated in
 * <code>tasksPendingToAdd</code> and flushed just before the next already
 * visible task, so relative ordering is preserved.
 */
private void reload(List<Task> tasks, List<Task> tasksPendingToAdd,
        boolean relocate) {
    for (Task task : tasks) {
        // flush pending additions before an already visible task's row
        if (visibleTasks.contains(task)) {
            addPendingTasks(tasksPendingToAdd, rowFor(task),
                    relocate);
        }
        if (predicate.accepts(task)) {
            if (!visibleTasks.contains(task)) {
                tasksPendingToAdd.add(task);
            }
        } else {
            // rejected by the filter: hide it if currently shown
            if (visibleTasks.contains(task)) {
                TaskComponent taskComponent = find(task);
                hideTaskComponent(taskComponent);
                visibleTasks.remove(task);
                task.setVisible(false);
            }
        }
        if (task instanceof TaskContainer) {
            // recurse into the children of container tasks
            reload(task.getTasks(), tasksPendingToAdd, relocate);
        }
    }
}
/**
 * @return the row hosting the given task's component, or <code>null</code>
 *         if the task is not currently shown
 */
private TaskRow rowFor(Task task) {
    final TaskComponent taskComponent = find(task);
    if (taskComponent == null) {
        return null;
    }
    return taskComponent.getRow();
}
/**
 * Materializes and inserts the pending tasks before the given row (or at
 * the end when <code>insertBefore</code> is null), marking each task
 * visible. The pending list is emptied afterwards.
 */
private void addPendingTasks(List<Task> tasksPendingToAdd,
        TaskRow insertBefore, boolean relocate) {
    if (tasksPendingToAdd.isEmpty()) {
        return;
    }
    for (Task pending : tasksPendingToAdd) {
        // reuse a previously built component when one exists
        TaskComponent component = taskComponentByTask.get(pending);
        if (component == null) {
            component = TaskComponent.asTaskComponent(pending, this);
            component.publishTaskComponents(taskComponentByTask);
        }
        addTaskComponent(insertBefore, component, relocate);
        visibleTasks.add(pending);
        pending.setVisible(true);
    }
    tasksPendingToAdd.clear();
}
/**
 * Installs a new filter predicate and re-evaluates the visibility of every
 * task against it.
 */
public void setPredicate(FilterAndParentExpandedPredicates predicate) {
    this.predicate = predicate;
    reload(false);
}
} |
package com.orientechnologies.orient.core.sql.functions.sequence;
import com.orientechnologies.orient.core.command.OCommandContext;
import com.orientechnologies.orient.core.db.ODatabaseRecordThreadLocal;
import com.orientechnologies.orient.core.db.record.OIdentifiable;
import com.orientechnologies.orient.core.exception.OCommandExecutionException;
import com.orientechnologies.orient.core.metadata.sequence.OSequence;
import com.orientechnologies.orient.core.sql.filter.OSQLFilterItem;
import com.orientechnologies.orient.core.sql.functions.OSQLFunctionConfigurableAbstract;
/**
* Returns a sequence by name.
*
* @author Luca Garulli
*/
public class OSQLFunctionSequence extends OSQLFunctionConfigurableAbstract {
public static final String NAME = "sequence";
public OSQLFunctionSequence() {
super(NAME, 1, 1);
}
@Override
public Object execute(Object iThis, OIdentifiable iCurrentRecord, Object iCurrentResult, Object[] iParams,
OCommandContext iContext) {
final String seqName;
if (configuredParameters[0] instanceof OSQLFilterItem)
seqName = (String) ((OSQLFilterItem) configuredParameters[0]).getValue(iCurrentRecord, iCurrentResult, iContext);
else
seqName = configuredParameters[0].toString();
OSequence result = ODatabaseRecordThreadLocal.INSTANCE.get().getMetadata().getSequenceLibrary().getSequence(seqName);
if (result == null) {
throw new OCommandExecutionException("Sequence not found: " + seqName);
}
return result;
}
@Override
public Object getResult() {
return null;
}
@Override
public String getSyntax() {
return "sequence(<name>)";
}
@Override
public boolean aggregateResults() {
return false;
}
} |
package com.exedio.cope.pattern;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import com.exedio.cope.Attribute;
import com.exedio.cope.SetValue;
import com.exedio.cope.FunctionAttribute;
import com.exedio.cope.Item;
import com.exedio.cope.ItemAttribute;
import com.exedio.cope.Pattern;
import com.exedio.cope.Type;
import com.exedio.cope.UniqueConstraint;
public final class Qualifier extends Pattern
{
private final ItemAttribute parent;
private final FunctionAttribute[] keys;
private final List<FunctionAttribute> keyList;
private final UniqueConstraint qualifyUnique;
private List<Attribute> attributes;
public Qualifier(final UniqueConstraint qualifyUnique)
{
if(qualifyUnique==null)
throw new RuntimeException(
"argument of qualifier constructor is null, " +
"may happen due to bad class intialization order.");
final List attributes = qualifyUnique.getUniqueAttributes();
if(attributes.size()<2)
throw new RuntimeException(attributes.toString());
this.parent = (ItemAttribute)attributes.get(0);
this.keys = new FunctionAttribute[attributes.size()-1];
for(int i = 0; i<this.keys.length; i++)
this.keys[i] = (FunctionAttribute)attributes.get(i+1);
this.keyList = Collections.unmodifiableList(Arrays.asList(this.keys));
this.qualifyUnique = qualifyUnique;
}
// TODO implicit external source: new Qualifier(QualifiedStringQualifier.key))
// TODO internal source: new Qualifier(stringAttribute(OPTIONAL))
// TODO use registerPattern on sources
public final ItemAttribute getParent()
{
return parent;
}
public final List<FunctionAttribute> getKeys()
{
return keyList;
}
public final UniqueConstraint getQualifyUnique()
{
return qualifyUnique;
}
public void initialize()
{
if(this.attributes!=null)
throw new RuntimeException();
final Type type = qualifyUnique.getType();
final List<Attribute> typeAttributes = type.getAttributes();
final ArrayList<Attribute> attributesModifiyable = new ArrayList<Attribute>(typeAttributes.size());
for(final Attribute attribute : type.getAttributes())
{
if(attribute!=parent && !keyList.contains(attribute))
attributesModifiyable.add(attribute);
}
this.attributes = Collections.unmodifiableList(attributesModifiyable);
}
public final List<Attribute> getAttributes()
{
if(this.attributes==null)
throw new RuntimeException();
return attributes;
}
public final Item getQualifier(final Object[] values)
{
return qualifyUnique.searchUnique(values);
}
public final Object get(final Object[] values, final FunctionAttribute attribute)
{
final Item item = qualifyUnique.searchUnique(values);
if(item!=null)
return attribute.get(item);
else
return null;
}
public Item getForSet(final Object[] keys)
{
Item item = qualifyUnique.searchUnique(keys);
if(item==null)
{
final SetValue[] keySetValues = new SetValue[keys.length];
int j = 0;
for(Iterator i = qualifyUnique.getUniqueAttributes().iterator(); i.hasNext(); j++)
{
final FunctionAttribute uniqueAttribute = (FunctionAttribute)i.next();
keySetValues[j] = new SetValue(uniqueAttribute, keys[j]);
}
item = qualifyUnique.getType().newItem(keySetValues);
}
return item;
}
public Item set(final Object[] keys, final SetValue[] values)
{
Item item = qualifyUnique.searchUnique(keys);
if(item==null)
{
final SetValue[] keyValues = new SetValue[values.length + keys.length];
System.arraycopy(values, 0, keyValues, 0, values.length);
int j = 0;
for(Iterator i = qualifyUnique.getUniqueAttributes().iterator(); i.hasNext(); j++)
{
final FunctionAttribute uniqueAttribute = (FunctionAttribute)i.next();
keyValues[j + values.length] = new SetValue(uniqueAttribute, keys[j]);
}
item = qualifyUnique.getType().newItem(keyValues);
}
else
{
item.set(values);
}
return item;
}
} |
package com.s3auth.rest;
import com.rexsl.core.Manifests;
import com.rexsl.page.BasePage;
import com.rexsl.page.JaxbBundle;
import com.rexsl.page.Link;
import com.s3auth.hosts.User;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlRootElement;
/**
* Base RESTful page.
*
* <p>The class is mutable and NOT thread-safe.
*
* @author Yegor Bugayenko (yegor@tpc2.com)
* @version $Id$
* @since 0.0.1
*/
@XmlRootElement(name = "page")
@XmlAccessorType(XmlAccessType.NONE)
public class CommonPage extends BasePage<CommonPage, BaseRs> {
/**
* Set authenticated user.
* @param user The user
* @return Itself
*/
public final CommonPage authenticated(final User user) {
this.link(new Link("logout", "/a/out"));
this.append(new JaxbUser(user));
return this;
}
/**
* Render it.
* @return JAX-RS response
*/
public final Response.ResponseBuilder render() {
BaseRs.class.cast(this.home()).render(this);
final Response.ResponseBuilder builder = Response.ok();
this.append(
new JaxbBundle("version")
.add("name", Manifests.read("S3Auth-Version"))
.up()
.add("revision", Manifests.read("S3Auth-Revision"))
.up()
.add("date", Manifests.read("S3Auth-Date"))
.up()
);
builder.entity(this);
builder.type(MediaType.TEXT_XML);
builder.type(HttpHeaders.VARY, "Cookie");
BaseRs.class.cast(this.home()).render(builder);
return builder;
}
} |
package com.s3auth.rest;
import com.jcabi.aspects.Loggable;
import com.rexsl.page.Link;
import com.s3auth.hosts.Domain;
import com.s3auth.hosts.Stats;
import java.util.Collection;
import java.util.LinkedList;
import javax.ws.rs.core.UriInfo;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlElementWrapper;
import javax.xml.bind.annotation.XmlRootElement;
/**
* JAXB domain.
*
* <p>The class is immutable and thread-safe.
*
* @author Yegor Bugayenko (yegor@tpc2.com)
* @version $Id$
* @since 0.0.1
*/
@XmlRootElement(name = "domain")
@XmlAccessorType(XmlAccessType.NONE)
@Loggable(Loggable.DEBUG)
public final class JaxbDomain {
    /**
     * The domain.
     */
    private final transient Domain domain;
    /**
     * The URI info.
     */
    private final transient UriInfo info;
    /**
     * The stats for this domain.
     */
    private final transient JaxbStats stats;
    /**
     * Public ctor for JAXB.
     * Exists only to satisfy JAXB's no-arg requirement; must never be used.
     */
    public JaxbDomain() {
        throw new IllegalStateException("This ctor should never be called");
    }
    /**
     * Ctor.
     * @param dmn The domain
     * @param inf URI info of the home
     * @param statistics The stats for this domain
     */
    public JaxbDomain(final Domain dmn, final UriInfo inf,
        final Stats statistics) {
        this.domain = dmn;
        this.info = inf;
        this.stats = new JaxbStats(statistics);
    }
    /**
     * Get name.
     * @return The name
     */
    @XmlElement
    public String getName() {
        return this.domain.name();
    }
    /**
     * Get key.
     * @return The key
     */
    @XmlElement
    public String getKey() {
        return this.domain.key();
    }
    /**
     * Get secret.
     * @return The secret
     */
    @XmlElement
    public String getSecret() {
        return this.domain.secret();
    }
    /**
     * Get bucket.
     * @return The bucket
     */
    @XmlElement
    public String getBucket() {
        return this.domain.bucket();
    }
    /**
     * Get region.
     * @return The region
     */
    @XmlElement
    public String getRegion() {
        return this.domain.region();
    }
    /**
     * Get syslog.
     * @return The syslog
     */
    @XmlElement
    public String getSyslog() {
        return this.domain.syslog();
    }
    /**
     * Get stats for this domain.
     * @return The stats
     */
    @XmlElement
    public JaxbStats getStats() {
        return this.stats;
    }
    /**
     * Get links.
     * @return The links (currently only a "remove" link for this domain)
     */
    @XmlElement(name = "link")
    @XmlElementWrapper(name = "links")
    public Collection<Link> getLinks() {
        final Collection<Link> links = new LinkedList<Link>();
        links.add(
            new Link(
                "remove",
                this.info.getBaseUriBuilder().clone()
                    .path("/remove")
                    .queryParam("host", "{name}")
                    .build(this.getName())
            )
        );
        return links;
    }
    /**
     * JAXB domain stats.
     */
    @XmlRootElement(name = "stats")
    private static final class JaxbStats {
        /**
         * The underlying stats.
         */
        private final transient Stats stats;
        /**
         * Default ctor required by JAXB.
         * Must never actually be invoked.
         */
        @SuppressWarnings("unused")
        JaxbStats() {
            throw new IllegalStateException(
                "Default constructor should never be called"
            );
        }
        /**
         * Constructor.
         * @param statistics The stats for this domain.
         */
        JaxbStats(final Stats statistics) {
            this.stats = statistics;
        }
        /**
         * Get bytes transferred.
         * @return Bytes transferred.
         */
        @XmlElement
        public long getBytesTransferred() {
            return this.stats.bytesTransferred();
        }
    }
}
package the8472.mldht.indexing;
import static java.lang.Math.min;
import static the8472.utils.Functional.typedGet;
import the8472.bencode.BDecoder;
import the8472.bencode.BEncoder;
import the8472.bt.TorrentUtils;
import the8472.bt.UselessPeerFilter;
import the8472.mldht.Component;
import the8472.mldht.TorrentFetcher;
import the8472.mldht.TorrentFetcher.FetchTask;
import the8472.mldht.indexing.TorrentDumper.FetchStats.State;
import the8472.utils.ConfigReader;
import the8472.utils.concurrent.LoggingScheduledThreadPoolExecutor;
import the8472.utils.io.FileIO;
import lbms.plugins.mldht.kad.DHT;
import lbms.plugins.mldht.kad.Key;
import lbms.plugins.mldht.kad.RPCServer;
import lbms.plugins.mldht.kad.DHT.LogLevel;
import lbms.plugins.mldht.kad.messages.AnnounceRequest;
import lbms.plugins.mldht.kad.messages.GetPeersRequest;
import lbms.plugins.mldht.kad.messages.MessageBase;
import lbms.plugins.mldht.kad.utils.ThreadLocalUtils;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.charset.StandardCharsets;
import java.nio.file.DirectoryNotEmptyException;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.NoSuchFileException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import java.nio.file.StandardOpenOption;
import java.util.AbstractMap;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Map.Entry;
import java.util.TreeMap;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ConcurrentSkipListMap;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Consumer;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
 * Passively collects infohashes from incoming get_peers/announce messages,
 * persists per-hash statistics as bencoded .stats files on disk and fetches
 * the corresponding torrents in the background, dumping successful results
 * into a torrents directory.
 */
public class TorrentDumper implements Component {
	Collection<DHT> dhts;
	// on-disk layout: ./dump-storage/{stats,torrents}
	Path storageDir = Paths.get(".", "dump-storage");
	Path statsDir = storageDir.resolve("stats");
	Path torrentDir = storageDir.resolve("torrents");
	ScheduledThreadPoolExecutor scheduler;
	// hashes harvested from incoming messages; drained periodically by dumpStats()
	ConcurrentSkipListMap<Key, FetchStats> fromMessages;
	// source address -> first-seen timestamp; throttles new-hash intake per IP
	ConcurrentMap<InetAddress, Long> blocklist = new ConcurrentHashMap<>();
	TorrentFetcher fetcher;
	UselessPeerFilter pf;

	/** Per-infohash bookkeeping, bencoded to and from .stats files. */
	static class FetchStats {
		final Key k;
		int insertCount = 1;
		InetAddress lastTouchedBy;
		long creationTime = -1;
		long lastFetchTime = -1;
		State state = State.INITIAL;

		/** Lifecycle of a hash; each state owns a subdirectory under stats/. */
		enum State {
			INITIAL,
			PRIORITY,
			FAILED;

			public Path stateDir(Path statsdir) {
				return statsdir.resolve(name().toLowerCase());
			}
		}

		public FetchStats(Key k, Consumer<FetchStats> init) {
			Objects.requireNonNull(k);
			this.k = k;
			if(init != null)
				init.accept(this);
		}

		/**
		 * Deserializes stats from a bencoded map. Missing or malformed
		 * optional fields are silently ignored; only the key is mandatory.
		 */
		static FetchStats fromBencoded(Map<String, Object> map) {
			Key k = typedGet(map, "k", byte[].class).map(Key::new).orElseThrow(() -> new IllegalArgumentException("missing key in serialized form"));
			return new FetchStats(k, fs -> {
				typedGet(map, "addr", byte[].class).map(t -> {
					try {
						return InetAddress.getByAddress(t);
					} catch (UnknownHostException e) {
						return null;
					}
				}).ifPresent(addr -> fs.lastTouchedBy = addr);
				typedGet(map, "state", byte[].class).map(b -> new String(b, StandardCharsets.ISO_8859_1)).map(str -> {
					try {
						return State.valueOf(str);
					} catch (IllegalArgumentException e) {
						return null;
					}
				}).ifPresent(st -> fs.state = st);
				typedGet(map, "created", Long.class).ifPresent(time -> fs.creationTime = time);
				typedGet(map, "cnt", Long.class).ifPresent(cnt -> fs.insertCount = cnt.intValue());
				typedGet(map, "fetchtime", Long.class).ifPresent(time -> fs.lastFetchTime = time);
			});
		}

		/**
		 * Serializes this object for bencoding.
		 * NOTE(review): NPEs if lastTouchedBy was never set — callers appear
		 * to always populate it; confirm.
		 */
		Map<String, Object> forBencoding() {
			Map<String, Object> map = new TreeMap<>();
			map.put("k", k.getHash());
			map.put("cnt", insertCount);
			map.put("addr", lastTouchedBy.getAddress());
			map.put("created", creationTime);
			map.put("state", state.name());
			map.put("fetchtime", lastFetchTime);
			return map;
		}

		public Key getK() {
			return k;
		}

		/**
		 * Folds another record for the same key into this one: counts are
		 * summed, the newer source address wins, the older creation time is
		 * kept. State is NOT merged here.
		 */
		public FetchStats merge(FetchStats other) {
			if(!k.equals(other.k))
				throw new IllegalArgumentException("key mismatch");
			boolean otherIsNewer = other.creationTime > creationTime;
			insertCount += other.insertCount;
			lastTouchedBy = otherIsNewer ? other.lastTouchedBy : lastTouchedBy;
			creationTime = min(creationTime, other.creationTime);
			return this;
		}

		public void setState(State newState) {
			state = newState;
		}

		// two-level fan-out by hex prefix keeps directory sizes manageable
		public Path name(Path dir, String suffix) {
			String hex = k.toString(false);
			return dir.resolve(hex.substring(0, 2)).resolve(hex.substring(2, 4)).resolve(hex+suffix);
		}

		/** Stats file path for the given state (null means the current state). */
		public Path statsName(Path statsDir, State st) {
			if(st == null)
				st = state;
			return name(st.stateDir(statsDir), ".stats");
		}
	}

	/**
	 * Hooks into the DHT instances, creates the directory layout and
	 * schedules all periodic maintenance jobs.
	 */
	@Override
	public void start(Collection<DHT> dhts, ConfigReader config) {
		this.dhts = dhts;
		fromMessages = new ConcurrentSkipListMap<>();
		scheduler = new LoggingScheduledThreadPoolExecutor(2, new LoggingScheduledThreadPoolExecutor.NamedDaemonThreadFactory("torrent dumper"), this::log);
		fetcher = new TorrentFetcher(dhts);
		fetcher.setMaxOpen(40);
		dhts.forEach(d -> d.addIncomingMessageListener(this::incomingMessage));
		try {
			pf = new UselessPeerFilter(storageDir.resolve("bad-peers"));
			Files.createDirectories(torrentDir);
			for(State st : FetchStats.State.values()) {
				Files.createDirectories(st.stateDir(statsDir));
			}
		} catch (IOException e) {
			throw new RuntimeException(e);
		}
		fetcher.setPeerFilter(pf);
		scheduler.scheduleWithFixedDelay(this::dumpStats, 10, 1, TimeUnit.SECONDS);
		scheduler.scheduleWithFixedDelay(this::startFetches, 10, 1, TimeUnit.SECONDS);
		scheduler.scheduleWithFixedDelay(this::cleanBlocklist, 1, 1, TimeUnit.MINUTES);
		scheduler.scheduleWithFixedDelay(this::diagnostics, 30, 30, TimeUnit.SECONDS);
		scheduler.scheduleWithFixedDelay(this::purgeStats, 5, 15, TimeUnit.MINUTES);
		scheduler.scheduleWithFixedDelay(this::scrubActive, 10, 20, TimeUnit.SECONDS);
		scheduler.scheduleWithFixedDelay(() -> {
			try {
				pf.clean();
			} catch (IOException e) {
				log(e);
			}
		}, 10, 5, TimeUnit.MINUTES);
	}

	void log(Throwable t) {
		DHT.log(t, LogLevel.Error);
	}

	// expire blocklist entries older than 10 minutes
	void cleanBlocklist() {
		long now = System.currentTimeMillis();
		blocklist.entrySet().removeIf(e -> {
			return (now - e.getValue()) > TimeUnit.MINUTES.toMillis(10);
		});
	}

	/**
	 * Harvests infohashes from get_peers and announce requests. get_peers
	 * lookups are ignored when the sender is much closer to the target than
	 * we are (they would not normally query us that deep into the lookup).
	 */
	void incomingMessage(DHT d, MessageBase m) {
		if(m instanceof GetPeersRequest) {
			GetPeersRequest gpr = (GetPeersRequest) m;
			RPCServer srv = m.getServer();
			Key theirID = gpr.getID();
			if(d.getNode().isLocalId(theirID))
				return;
			Key ourId = srv.getDerivedID();
			Key target = gpr.getInfoHash();
			// ignore degenerate messages where any two of the three match
			if(Stream.of(theirID, ourId, target).distinct().count() != 3)
				return;
			int myCloseness = ourId.distance(target).leadingOneBit();
			int theirCloseness = theirID.distance(target).leadingOneBit();
			if(theirCloseness > myCloseness && theirCloseness - myCloseness >= 8)
				return; // they're looking for something that's significantly closer to their own ID than we are
			process(gpr.getInfoHash(), gpr.getOrigin().getAddress(), null);
		}
		if(m instanceof AnnounceRequest) {
			AnnounceRequest anr = (AnnounceRequest) m;
			process(anr.getInfoHash(), anr.getOrigin().getAddress(), anr.getNameUTF8().orElse(null));
		}
	}

	/** Records (or merges) one sighting of an infohash into the in-memory map. */
	void process(Key k, InetAddress src, String name) {
		fromMessages.compute(k, (unused, f) -> {
			FetchStats f2 = new FetchStats(k, init -> {
				init.lastTouchedBy = src;
				init.insertCount = 1;
				init.creationTime = System.currentTimeMillis();
			});
			return f == null ? f2 : f.merge(f2);
		});
	}

	// round-robin position in the keyspace for the dumpStats() drain
	Key cursor = Key.MIN_KEY;

	/**
	 * Drains the in-memory sightings map to the on-disk stats files,
	 * merging with any existing record and promoting multiply-seen hashes
	 * to PRIORITY. Writes are done via temp file + atomic move.
	 */
	void dumpStats() {
		long now = System.currentTimeMillis();

		for(;;) {
			Entry<Key, FetchStats> entry = fromMessages.ceilingEntry(cursor);
			if(entry == null) {
				cursor = Key.MIN_KEY;
				break;
			}
			Key k = entry.getKey();
			FetchStats s = entry.getValue();

			fromMessages.remove(k);
			cursor = k.add(Key.setBit(159));

			// already fetched this torrent: nothing to record
			if(Files.exists(s.name(torrentDir, ".torrent"))) {
				continue;
			}

			try {
				Optional<Path> existing = Stream.of(s.statsName(statsDir, FetchStats.State.FAILED), s.statsName(statsDir, FetchStats.State.PRIORITY), s.statsName(statsDir, FetchStats.State.INITIAL)).filter(Files::isRegularFile).findFirst();

				if(!existing.isPresent()) {
					// only throttle IPs for new hashes we don't already know about and wouldn't try anyway
					if(activeCount.get() > 50 && blocklist.putIfAbsent(s.lastTouchedBy, now) != null)
						continue;
				}

				if(existing.isPresent()) {
					Path p = existing.get();
					try {
						FetchStats old = FetchStats.fromBencoded(new BDecoder().decode(ByteBuffer.wrap(Files.readAllBytes(p))));
						// avoid double-taps
						// NOTE(review): this exits the whole drain loop, not
						// just this entry — confirm a 'continue' was not intended
						if(old.lastTouchedBy.equals(s.lastTouchedBy))
							return;
						s.merge(old);
						// keep a previously assigned PRIORITY/FAILED state
						if(old.state != FetchStats.State.INITIAL)
							s.state = old.state;
					} catch (IOException e) {
						log(e);
					}
				}

				// seen more than once -> promote to the priority queue
				if(s.state == State.INITIAL && s.insertCount > 1) {
					s.state = State.PRIORITY;
					if(existing.isPresent())
						Files.deleteIfExists(existing.get());
				}

				Path statsFile = s.statsName(statsDir, null);
				Path tempFile = Files.createTempFile(statsDir, statsFile.getFileName().toString(), ".stats");

				try(FileChannel ch = FileChannel.open(tempFile, StandardOpenOption.WRITE)) {
					ByteBuffer buf = new BEncoder().encode(s.forBencoding(), 16*1024);
					ch.write(buf);
					ch.close();
					Files.createDirectories(statsFile.getParent());
					Files.move(tempFile, statsFile, StandardCopyOption.ATOMIC_MOVE);
				} finally {
					Files.deleteIfExists(tempFile);
				}
			} catch (Exception e) {
				log(e);
			}
		}
	}

	/**
	 * Deletes FAILED stats older than two hours and prunes empty
	 * fan-out directories afterwards.
	 */
	void purgeStats() {
		Path failedDir = FetchStats.State.FAILED.stateDir(statsDir);

		long now = System.currentTimeMillis();

		try (Stream<Path> pst = Files.find(failedDir, 5, (p, a) -> a.isRegularFile())) {
			Stream<FetchStats> st = filesToFetchers(pst);
			st.filter(Objects::nonNull).filter(stat -> now - stat.lastFetchTime > TimeUnit.HOURS.toMillis(2)).forEach(stat -> {
				try {
					Files.deleteIfExists(stat.statsName(statsDir, null));
				} catch (IOException e) {
					log(e);
				}
			});
		} catch (UncheckedIOException | IOException e) {
			log(e);
		}

		// 0 -> stats, 1 -> {failed|initial|prio}, 2 -> 00, 3 -> 00/00
		try (Stream<Path> st = Files.find(statsDir, 3, (p, attr) -> attr.isDirectory())) {
			st.filter(d -> {
				try (DirectoryStream<Path> dst = Files.newDirectoryStream(d)) {
					return !dst.iterator().hasNext();
				} catch (IOException e) {
					throw new UncheckedIOException(e);
				}
			}).forEach(d -> {
				try {
					Files.deleteIfExists(d);
				} catch(DirectoryNotEmptyException e) {
					// someone on another thread wrote to it. do nothing
				} catch (IOException e) {
					throw new UncheckedIOException(e);
				}
			});
		} catch (UncheckedIOException | IOException e) {
			log(e);
		}
	}

	/** Lists a directory's entries in random order; null for non-directories. */
	Stream<Path> dirShuffler(Path p) {
		if(!Files.isDirectory(p))
			return null;
		List<Path> sub;
		try(Stream<Path> st = Files.list(p)) {
			sub = st.collect(Collectors.toList());
		} catch (IOException e) {
			throw new UncheckedIOException(e);
		}
		Collections.shuffle(sub);
		return sub.stream();
	}

	Stream<Path> fetchStatsStream(Stream<Path> rootDirs) throws IOException {
		// this does not use a true shuffle, the stream will emit straight runs at the 16bit keyspace granularity
		// and then batches of such runs shuffled at the 8bit level
		// it's closer to linear scan from a random starting point
		// but polling in small batches should lead to reasonable task randomization without expensive full directory traversal
		Stream<Path> leafs = rootDirs.flatMap(d -> {
			return Stream.of(d).flatMap(this::dirShuffler).flatMap(this::dirShuffler).flatMap(this::dirShuffler);
		});

		return leafs;
	}

	/**
	 * Maps stats file paths to deserialized FetchStats, reusing a per-thread
	 * read buffer. Vanished files (async deletes) and read errors yield no
	 * element instead of failing the stream.
	 */
	Stream<FetchStats> filesToFetchers(Stream<Path> st) throws IOException {
		ThreadLocal<ByteBuffer> bufProvider = new ThreadLocal<>();

		return st.map(p -> {
			try(FileChannel ch = FileChannel.open(p, StandardOpenOption.READ)) {
				long size = ch.size();
				ByteBuffer buf = bufProvider.get();
				if(buf == null || buf.capacity() < size)
					buf = ByteBuffer.allocate((int) (size * 1.5));
				buf.clear();
				ch.read(buf);
				buf.flip();
				bufProvider.set(buf);
				return FetchStats.fromBencoded(ThreadLocalUtils.getDecoder().decode(buf));
			} catch(NoSuchFileException ex) {
				// expect async deletes
				return null;
			} catch(IOException ex) {
				log(ex);
				return null;
			}
		}).filter(Objects::nonNull);
	}

	/** Polls up to 200 stats entries (priority first) and starts fetch tasks. */
	void startFetches() {
		try {
			Path prio = FetchStats.State.PRIORITY.stateDir(statsDir);
			Path normal = FetchStats.State.INITIAL.stateDir(statsDir);

			try(Stream<FetchStats> st = filesToFetchers(fetchStatsStream(Stream.of(prio, normal)))) {
				st.limit(200).forEachOrdered(this::fetch);
			};
		} catch (Exception e) {
			log(e);
		}
	}

	AtomicInteger activeCount = new AtomicInteger();
	ConcurrentHashMap<Key, FetchTask> activeTasks = new ConcurrentHashMap<>();

	/** Kills the slowest long-running fetch tasks when the pool saturates. */
	void scrubActive() {
		// as long as there are young connections it means some fraction of the fetch tasks dies quickly
		// we're fine with other ones taking longer as long as that's the case
		long youngConnections = activeTasks.values().stream().filter(t -> t.attemptedCount() < 5).count();
		if(youngConnections > 15 || activeCount.get() < 90)
			return;

		Comparator<Map.Entry<FetchTask, Integer>> comp = Map.Entry.comparingByValue();
		comp = comp.reversed();

		activeTasks.values().stream().map(t -> new AbstractMap.SimpleEntry<>(t, t.attemptedCount())).filter(e -> e.getValue() > 70).sorted(comp).limit(10).forEach(e -> {
			e.getKey().stop();
		});
	}

	/**
	 * Starts a background fetch for the given stats entry unless one is
	 * already running or the concurrency cap (100) is reached.
	 */
	void fetch(FetchStats stats) {
		Key k = stats.getK();

		if(activeTasks.containsKey(k))
			return;

		if(activeCount.get() > 100)
			return;

		FetchTask t = fetcher.fetch(k, (fetch) -> {
			fetch.configureLookup(lookup -> {
				lookup.setFastTerminate(true);
				lookup.setLowPriority(true);
			});
		});

		activeCount.incrementAndGet();
		activeTasks.put(k, t);

		t.awaitCompletion().thenRun(() -> {
			scheduler.execute(() -> {
				// run on the scheduler so we don't end up with interfering file ops
				taskFinished(stats, t);
			});
		});
	}

	/**
	 * Cleans up after a fetch task: removes all stats files for the hash,
	 * then either records a FAILED stats entry or writes the fetched
	 * metadata as a .torrent file.
	 */
	void taskFinished(FetchStats stats, FetchTask t) {
		activeCount.decrementAndGet();
		blocklist.remove(stats.lastTouchedBy);
		activeTasks.remove(t.infohash());
		try {
			for(FetchStats.State st : FetchStats.State.values()) {
				Files.deleteIfExists(stats.statsName(statsDir, st));
			}
			if(!t.getResult().isPresent()) {
				stats.setState(FetchStats.State.FAILED);
				stats.lastFetchTime = System.currentTimeMillis();

				Path failedStatsFile = stats.statsName(statsDir, null);
				Files.createDirectories(failedStatsFile.getParent());
				try(FileChannel statsChan = FileChannel.open(failedStatsFile, StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.WRITE)) {
					statsChan.write(new BEncoder().encode(stats.forBencoding(), 4*1024));
				}

				return;
			}
			ByteBuffer buf = t.getResult().get();

			Path torrentFile = stats.name(torrentDir, ".torrent");
			Files.createDirectories(torrentFile.getParent());
			try(FileChannel chan = FileChannel.open(torrentFile, StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.WRITE)) {
				chan.write(TorrentUtils.wrapBareInfoDictionary(buf));
			}

		} catch (Exception e) {
			log(e);
		}
	}

	/** Periodically dumps fetcher/task state to dumper.log for inspection. */
	void diagnostics() {
		try {
			FileIO.writeAndAtomicMove(storageDir.resolve("dumper.log"), (p) -> {
				p.format("Fetcher:%n established: %d%n sockets: %d %n%n", fetcher.openConnections(), fetcher.socketcount());

				p.format("FetchTasks: %d %n", activeCount.get());
				activeTasks.values().forEach(ft -> {
					p.println(ft.toString());
				});
			});
		} catch (IOException e) {
			log(e);
		}
	}

	@Override
	public void stop() {
		scheduler.shutdown();
		activeTasks.values().forEach(FetchTask::stop);
	}
}
package edu.iu.grid.oim.view.divrep;
import java.io.PrintWriter;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.TreeMap;
import org.apache.commons.lang.StringEscapeUtils;
import org.apache.log4j.Logger;
import com.webif.divrep.common.Button;
import com.webif.divrep.DivRep;
import com.webif.divrep.Event;
import com.webif.divrep.EventListener;
import com.webif.divrep.common.Static;
import com.webif.divrep.common.CheckBoxFormElement;
import com.webif.divrep.common.FormElement;
import com.webif.divrep.common.Select;
import com.webif.divrep.common.TextArea;
import edu.iu.grid.oim.lib.Authorization;
import edu.iu.grid.oim.lib.StaticConfig;
import edu.iu.grid.oim.model.Context;
import edu.iu.grid.oim.model.ResourceDowntime;
import edu.iu.grid.oim.model.db.DowntimeClassModel;
import edu.iu.grid.oim.model.db.DowntimeSeverityModel;
import edu.iu.grid.oim.model.db.ResourceDowntimeModel;
import edu.iu.grid.oim.model.db.ResourceDowntimeServiceModel;
import edu.iu.grid.oim.model.db.ResourceServiceModel;
import edu.iu.grid.oim.model.db.ServiceModel;
import edu.iu.grid.oim.model.db.ConfigModel.Config;
import edu.iu.grid.oim.model.db.record.DowntimeClassRecord;
import edu.iu.grid.oim.model.db.record.DowntimeSeverityRecord;
import edu.iu.grid.oim.model.db.record.ResourceDowntimeRecord;
import edu.iu.grid.oim.model.db.record.ResourceDowntimeServiceRecord;
import edu.iu.grid.oim.model.db.record.ResourceServiceRecord;
import edu.iu.grid.oim.model.db.record.ServiceRecord;
public class ResourceDowntimeEditor extends FormElement {
static Logger log = Logger.getLogger(ResourceDowntimeEditor.class);
private Context context;
private Button add_button;
private ArrayList<ResourceDowntimeRecord> downtime_recs;
private Authorization auth;
private int resource_id;
public class DowntimeEditor extends FormElement
{
//service details
private TextArea summary;
private Integer downtime_id;
private DateDE start_date;
private TimeDE start_time;
private DateDE end_date;
private TimeDE end_time;
private Select class_id;
private Select severity_id;
private HashMap<Integer/*service_id*/, CheckBoxFormElement> affected_services = new HashMap<Integer, CheckBoxFormElement>();
private Button remove_button;
class DateDE extends FormElement<Date>
{
private static final String default_format = "M/d/yyyy";
private static final String default_jquery_format = "m/d/yy";
String minDate = null;
protected DateDE(DivRep parent) {
super(parent);
value = new Date();//today
}
public void setMinDate(Date d)
{
minDate = "new Date("+d.getTime()+")";
}
protected void onEvent(Event e) {
SimpleDateFormat format = new SimpleDateFormat(default_format);
try {
value = format.parse((String)e.value);
} catch (ParseException e1) {
alert(e1.getMessage() + ". Please specify a valid date such as 4/17/2009");
}
modified(true);
redraw();
}
public void render(PrintWriter out) {
out.write("<div id=\""+getNodeID()+"\">");
if(label != null) {
out.print("<label>"+StringEscapeUtils.escapeHtml(label)+"</label><br/>");
}
SimpleDateFormat format = new SimpleDateFormat(default_format);
String str = format.format(value);
out.write("<input type=\"text\" class=\"datepicker\" value=\""+str+"\"/>");
//setup the datepicker
out.write("<script type=\"text/javascript\">");
out.write("$(document).ready(function() { $(\"#"+getNodeID()+" .datepicker\").datepicker({" +
"onSelect: function(value) {divrep('"+getNodeID()+"', null, value);},"+
"dateFormat: '"+default_jquery_format+"',"+
"beforeShow: function() {$(this).attr('disabled', 'disabled');},"+
"onClose: function() {$(this).attr('disabled', '');},"+
"changeYear: true,"+
"changeMonth: true"
);
out.write("});});");
out.write("</script>");
error.render(out);
out.write("</div>");
}
}
class TimeDE extends FormElement<Integer>
{
Select hour;
Select min;
protected TimeDE(DivRep parent) {
super(parent);
TreeMap<Integer, String> hours = new TreeMap<Integer, String>();
hours.put(0, "0 AM");
hours.put(1, "1 AM");
hours.put(2, "2 AM");
hours.put(3, "3 AM");
hours.put(4, "4 AM");
hours.put(5, "5 AM");
hours.put(6, "6 AM");
hours.put(7, "7 AM");
hours.put(8, "8 AM");
hours.put(9, "9 AM");
hours.put(10, "10 AM");
hours.put(11, "11 AM");
hours.put(12, "12 (Noon)");
hours.put(13, "1 PM (13)");
hours.put(14, "2 PM (14)");
hours.put(15, "3 PM (15)");
hours.put(16, "4 PM (16)");
hours.put(17, "5 PM (17)");
hours.put(18, "6 PM (18)");
hours.put(19, "7 PM (19)");
hours.put(20, "8 PM (20)");
hours.put(21, "9 PM (21)");
hours.put(22, "10 PM (22)");
hours.put(23, "11 PM (23)");
hour = new Select(this, hours);
hour.addEventListener(new EventListener() {
public void handleEvent(Event e) {
Integer h = Integer.valueOf((String)e.value);
int current_min = value%60;
value = h*60 + current_min;
}});
hour.setHasNull(false);
TreeMap<Integer, String> mins = new TreeMap<Integer, String>();
for(int m = 0; m < 60; m+=5) {
mins.put(m, ":" + m);
}
min = new Select(this, mins);
min.addEventListener(new EventListener() {
public void handleEvent(Event e) {
Integer m = Integer.valueOf((String)e.value);
int current_hour = value/60;
value = current_hour*60 + m;
}});
min.setHasNull(false);
Date current = new Date();
setValue(new Timestamp(current.getTime()));
}
public void render(PrintWriter out) {
int value_hour = (int)value/60;
int value_min = (int)value%60;
hour.setValue(value_hour);
min.setValue(value_min);
out.write("<table id=\""+getNodeID()+"\"><tr><td>");
hour.render(out);
out.write("</td><td>");
min.render(out);
error.render(out);
out.write("</td></tr></table>");
}
public void setValue(Timestamp time) {
long sec = time.getTime()/1000;
int sec_inday = (int)(sec % (3600*24));
int mins = sec_inday / 60;
int hours = mins / 60;
//adjust it to 5 minutes increment (since we don't allow selecting sub 5 minutes)
mins = (mins / 5) * 5;
value = mins + hours * 60;
}
public Integer getHour()
{
return value/60;
}
public Integer getMin()
{
return value%60;
}
protected void onEvent(Event e) {
// TODO Auto-generated method stub
}
}
public DowntimeEditor(DivRep parent, ResourceDowntimeRecord rec, Authorization auth) throws SQLException {
super(parent);
downtime_id = rec.id;
new Static(this, "<h3>Duration (UTC)</h3>");
new Static(this, "<table><tr><td>");
start_date = new DateDE(this);
start_date.setMinDate(new Date());
if(rec.start_time != null) {
start_date.setValue(rec.start_time);
}
start_date.addEventListener(new EventListener() {
public void handleEvent(Event e) {
//DowntimeEditor.this.adjustEndTime();
DowntimeEditor.this.validate();
}});
new Static(this, "</td><td>");
start_time = new TimeDE(this);
if(rec.start_time != null) {
start_time.setValue(rec.start_time);
}
start_time.addEventListener(new EventListener() {
public void handleEvent(Event e) {
//DowntimeEditor.this.adjustEndTime();
DowntimeEditor.this.validate();
}});
new Static(this, "</td><td> to </td><td>");
end_date = new DateDE(this);
end_date.setMinDate(new Date());
if(rec.end_time != null) {
end_date.setValue(rec.end_time);
}
end_date.addEventListener(new EventListener() {
public void handleEvent(Event e) {
//DowntimeEditor.this.adjustStartTime();
DowntimeEditor.this.validate();
}});
new Static(this, "</td><td>");
end_time = new TimeDE(this);
if(rec.end_time != null) {
end_time.setValue(rec.end_time);
}
end_time.addEventListener(new EventListener() {
public void handleEvent(Event e) {
//DowntimeEditor.this.adjustStartTime();
DowntimeEditor.this.validate();
}});
new Static(this, "</td></tr></table>");
new Static(this, "<h3>Detail</h3>");
summary = new TextArea(this);
summary.setLabel("Downtime Summary");
summary.setRequired(true);
if(rec.downtime_summary != null) {
summary.setValue(rec.downtime_summary);
}
summary.setWidth(600);
summary.setHeight(200);
TreeMap<Integer, String> class_kv = new TreeMap<Integer, String>();
DowntimeClassModel dcmodel = new DowntimeClassModel(context);
for(DowntimeClassRecord dcrec : dcmodel.getAll()) {
class_kv.put(dcrec.id, dcrec.name);
}
class_id = new Select(this, class_kv);
class_id.setLabel("Class");
class_id.setRequired(true);
if(rec.downtime_class_id != null) {
class_id.setValue(rec.downtime_class_id);
}
else {// Select first element as default, we could set this to any of the choices
if (class_kv != null) class_id.setValue(1);
}
TreeMap<Integer, String> severity_kv = new TreeMap<Integer, String>();
DowntimeSeverityModel smodel = new DowntimeSeverityModel(context);
for(DowntimeSeverityRecord dcrec : smodel.getAll()) {
severity_kv.put(dcrec.id, dcrec.name);
}
severity_id = new Select(this, severity_kv);
severity_id.setLabel("Severity");
severity_id.setRequired(true);
if(rec.downtime_severity_id != null) {
severity_id.setValue(rec.downtime_severity_id);
}
else { // Select first element as default, we could set this to any of the choices
if (severity_kv != null) severity_id.setValue(1);
}
new Static(this, "<h3>Affected Services</h3>");
ResourceServiceModel rsmodel = new ResourceServiceModel(context);
Collection<ResourceServiceRecord> rsrecs = rsmodel.getAllByResourceID(resource_id);
for(ResourceServiceRecord rsrec : rsrecs) {
addService(rsrec.service_id);
}
remove_button = new Button(this, "images/delete.png");
remove_button.setStyle(Button.Style.IMAGE);
remove_button.addEventListener(new EventListener() {
public void handleEvent(Event e) {
removeDowntime(DowntimeEditor.this);
modified(true);
}
});
}
public void addService(Integer service_id)
{
final ServiceModel servicemodel = new ServiceModel(context);
ResourceDowntimeServiceModel rdsmodel = new ResourceDowntimeServiceModel(context);
try {
final CheckBoxFormElement elem = new CheckBoxFormElement(this);
if(service_id != null) {
ServiceRecord srec = servicemodel.get(service_id);
elem.setLabel(srec.name);
} else {
elem.setLabel("(Service Name Not Yet Selected)");
}
affected_services.put(service_id, elem);
ResourceDowntimeServiceRecord keyrec = new ResourceDowntimeServiceRecord();
keyrec.resource_downtime_id = downtime_id;
keyrec.service_id = service_id;
if(rdsmodel.get(keyrec) != null) {
elem.setValue(true);
}
// If this is a new add, then by default have all services selected. Proves to be less error prone! -agopu
if (downtime_id == null) {
elem.setValue(true);
}
redraw();
} catch(SQLException e) {
log.error(e);
}
}
public void removeService(Integer service_id)
{
CheckBoxFormElement check = affected_services.get(service_id);
affected_services.remove(service_id);
remove(check);
redraw();
}
protected void onEvent(Event e) {
// TODO Auto-generated method stub
}
public void render(PrintWriter out) {
out.write("<div class=\"downtime_editor\" id=\""+getNodeID()+"\">");
out.write("<span class=\"right\">");
remove_button.render(out);
out.write("</span>");
for(DivRep child : childnodes) {
if(child == remove_button) continue;
if(child == error) continue;
if(child instanceof FormElement) {
FormElement elem = (FormElement)child;
if(!elem.isHidden()) {
out.print("<div class=\"form_element\">");
child.render(out);
out.print("</div>");
}
} else {
//non form element..
child.render(out);
}
}
error.render(out);
out.write("</div>");
}
//caller should set resource_id
public ResourceDowntimeRecord getDowntimeRecord() {
ResourceDowntimeRecord rec = new ResourceDowntimeRecord();
rec.id = downtime_id;
rec.resource_id = resource_id;
rec.downtime_summary = summary.getValue();
rec.start_time = getStartTime();
rec.end_time = getEndTime();
rec.downtime_class_id = class_id.getValue();
rec.downtime_severity_id = severity_id.getValue();
rec.dn_id = auth.getDNID();
return rec;
}
public Timestamp getStartTime()
{
return convertToTimestamp(start_date.getValue(), start_time.getHour(), start_time.getMin());
}
public Timestamp getEndTime()
{
return convertToTimestamp(end_date.getValue(), end_time.getHour(), end_time.getMin());
}
private Timestamp convertToTimestamp(Date date, int hour, int min)
{
//Calendar cal = (Calendar)Calendar.getInstance().clone();
Calendar cal = Calendar.getInstance();
cal.setTime(date);
cal.set(Calendar.HOUR_OF_DAY, hour);
cal.set(Calendar.MINUTE, min);
cal.set(Calendar.SECOND, 0);
return new Timestamp(cal.getTimeInMillis());
}
public ArrayList<ResourceDowntimeServiceRecord> getAffectedServiceRecords()
{
ArrayList<ResourceDowntimeServiceRecord> list = new ArrayList<ResourceDowntimeServiceRecord>();
for(Integer service_id : affected_services.keySet()) {
CheckBoxFormElement checkbox = affected_services.get(service_id);
if(checkbox.getValue()) {
ResourceDowntimeServiceRecord rec = new ResourceDowntimeServiceRecord();
rec.resource_downtime_id = downtime_id;
rec.service_id = service_id;
list.add(rec);
}
}
return list;
}
public ResourceDowntime getResourceDowntime()
{
ResourceDowntime downtime = new ResourceDowntime();
downtime.downtime = getDowntimeRecord();
downtime.services = getAffectedServiceRecords();
return downtime;
}
public void validate()
{
super.validate();
if(valid == true) {
Timestamp start = getStartTime();
Timestamp end = getEndTime();
if(start.compareTo(end) > 0) {
valid = false;
error.set("Start Time is after the end time. Please correct.");
return;
}
int service_count = 0;
for(Integer service_id : affected_services.keySet()) {
CheckBoxFormElement checkbox = affected_services.get(service_id);
if(checkbox.getValue()) {
++service_count;
}
}
if(service_count == 0) {
valid = false;
error.set("Please select at least one affected service.");
return;
}
}
}
}
public void removeDowntime(DowntimeEditor downtime)
{
remove(downtime);
redraw();
}
public DowntimeEditor addDowntime(ResourceDowntimeRecord rec) throws SQLException {
DowntimeEditor elem = new DowntimeEditor(this, rec, auth);
redraw();
return elem;
}
public ResourceDowntimeEditor(DivRep parent, Context _context, final Integer _resource_id) throws SQLException {
super(parent);
context = _context;
auth = context.getAuthorization();
resource_id = _resource_id;
ResourceDowntimeModel dmodel = new ResourceDowntimeModel(context);
Collection <ResourceDowntimeRecord> dt_records = dmodel.getFutureDowntimesByResourceID(resource_id);
for(ResourceDowntimeRecord drec : dt_records) {
addDowntime(drec);
}
add_button = new Button(this, "Add New Downtime");
add_button.setStyle(Button.Style.ALINK);
add_button.addEventListener(new EventListener() {
public void handleEvent(Event e) {
try {
addDowntime(new ResourceDowntimeRecord());
modified(true);
} catch (SQLException e1) {
log.error(e1);
}
}
});
}
public ArrayList<ResourceDowntime> getResourceDowntimes()
{
ArrayList<ResourceDowntime> downtimes = new ArrayList<ResourceDowntime>();
for(DivRep node : childnodes) {
if(node instanceof DowntimeEditor) {
DowntimeEditor downtime = (DowntimeEditor)node;
downtimes.add(downtime.getResourceDowntime());
}
}
return downtimes;
}
protected void onEvent(Event e) {
// TODO Auto-generated method stub
}
public void render(PrintWriter out) {
int count = 0;
out.print("<div id=\""+getNodeID()+"\">");
for(DivRep node : childnodes) {
if(node instanceof DowntimeEditor) {
count++;
node.render(out);
}
}
// Adding some clear text to make it look less odd. Is there a cleaner way to do this? -agopu
if (count == 0) {
new Static(this, "<p>No existing downtimes for this resource</p>").render(out);
}
add_button.render(out);
out.print("</div>");
}
} |
package edu.jhu.hltcoe.gridsearch;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.PriorityQueue;
import org.apache.log4j.Logger;
import org.jboss.dna.common.statistic.Stopwatch;
import edu.jhu.hltcoe.gridsearch.FathomStats.FathomStatus;
import edu.jhu.hltcoe.math.Vectors;
import edu.jhu.hltcoe.util.Time;
import edu.jhu.hltcoe.util.Utilities;
/**
* For a maximization problem, this performs eager (as opposed to lazy) branch
* and bound.
*
* The SCIP thesis section 6.3 notes that
* "Usually, the child nodes inherit the dual bound of their parent node", so
* maybe we should switch to lazy branch and bound.
*/
public class LazyBranchAndBoundSolver {
private static final Logger log = Logger.getLogger(LazyBranchAndBoundSolver.class);
public enum SearchStatus {
OPTIMAL_SOLUTION_FOUND, NON_OPTIMAL_SOLUTION_FOUND
}
public static final double WORST_SCORE = Double.NEGATIVE_INFINITY;
public static final double BEST_SCORE = Double.POSITIVE_INFINITY;
protected double incumbentScore;
protected Solution incumbentSolution;
protected SearchStatus status;
// Storage of active nodes
protected final NodeOrderer leafNodePQ;
protected final PriorityQueue<ProblemNode> upperBoundPQ;
protected final double epsilon;
protected final double timeoutSeconds;
protected Stopwatch nodeTimer;
protected Stopwatch switchTimer;
protected Stopwatch relaxTimer;
protected Stopwatch feasTimer;
protected Stopwatch branchTimer;
// If true, fathoming is disabled. This enables random sampling of the
// branch and bound tree.
protected boolean disableFathoming;
public LazyBranchAndBoundSolver(double epsilon, NodeOrderer leafNodeOrderer, double timeoutSeconds) {
this.epsilon = epsilon;
this.leafNodePQ = leafNodeOrderer;
this.upperBoundPQ = new PriorityQueue<ProblemNode>(11, new BfsComparator());
this.timeoutSeconds = timeoutSeconds;
this.disableFathoming = false;
// Timers
nodeTimer = new Stopwatch();
switchTimer = new Stopwatch();
relaxTimer = new Stopwatch();
feasTimer = new Stopwatch();
branchTimer = new Stopwatch();
}
public SearchStatus runBranchAndBound(ProblemNode rootNode) {
return runBranchAndBound(rootNode, null, WORST_SCORE);
}
public SearchStatus runBranchAndBound(ProblemNode rootNode, Solution initialSolution, double initialScore) {
// Initialize
this.incumbentSolution = initialSolution;
this.incumbentScore = initialScore;
double upperBound = BEST_SCORE;
status = SearchStatus.NON_OPTIMAL_SOLUTION_FOUND;
leafNodePQ.clear();
upperBoundPQ.clear();
int numProcessed = 0;
FathomStats fathom = new FathomStats();
addToLeafNodes(rootNode);
double rootLogSpace = rootNode.getLogSpace();
double logSpaceRemain = rootLogSpace;
ProblemNode curNode = null;
evalIncumbent(initialSolution);
while (hasNextLeafNode()) {
if (nodeTimer.isRunning()) { nodeTimer.stop(); }
nodeTimer.start();
// The upper bound can only decrease
if (upperBoundPQ.peek().getOptimisticBound() > upperBound + 1e-8) {
log.warn(String.format("Upper bound should be strictly decreasing: peekUb = %e\tprevUb = %e", upperBoundPQ.peek().getOptimisticBound(), upperBound));
}
upperBound = upperBoundPQ.peek().getOptimisticBound();
assert (!Double.isNaN(upperBound));
numProcessed++;
double relativeDiff = computeRelativeDiff(upperBound, incumbentScore);
if (relativeDiff <= epsilon) {
// Optimal solution found.
break;
} else if (Time.totSec(nodeTimer) > timeoutSeconds) {
// Timeout reached.
break;
}
// Logging.
printSummary(upperBound, relativeDiff, numProcessed, fathom);
if (log.isDebugEnabled() && numProcessed % 100 == 0) {
printLeafNodeBoundHistogram();
printTimers(numProcessed);
printSpaceRemaining(numProcessed, rootLogSpace, logSpaceRemain);
}
// Process the next node.
curNode = getNextLeafNode();
NodeResult result = processNode(curNode, numProcessed);
fathom.fathom(curNode, result.status);
if (result.status != FathomStatus.NotFathomed) {
logSpaceRemain = Utilities.logSubtractExact(logSpaceRemain, curNode.getLogSpace());
}
for (ProblemNode childNode : result.children) {
addToLeafNodes(childNode);
}
}
if (nodeTimer.isRunning()) { nodeTimer.stop(); }
// Print summary
evalIncumbent(incumbentSolution);
double relativeDiff = computeRelativeDiff(upperBound, incumbentScore);
if (relativeDiff <= epsilon) {
status = SearchStatus.OPTIMAL_SOLUTION_FOUND;
}
printSummary(upperBound, relativeDiff, numProcessed, fathom);
printTimers(numProcessed);
leafNodePQ.clear();
upperBoundPQ.clear();
log.info("B&B search status: " + status);
// Return epsilon optimal solution
return status;
}
public static class NodeResult {
public FathomStatus status;
public List<ProblemNode> children;
public NodeResult(FathomStatus status) {
this.status = status;
this.children = Collections.emptyList();
}
public NodeResult(FathomStatus status, List<ProblemNode> children) {
this.status = status;
this.children = children;
}
}
protected NodeResult processNode(ProblemNode curNode, int numProcessed) {
switchTimer.start();
curNode.setAsActiveNode();
switchTimer.stop();
curNode.updateTimeRemaining(timeoutSeconds - Time.totSec(nodeTimer));
// TODO: else if, ran out of memory or disk space, break
// The active node can compute a tighter upper bound instead of
// using its parent's bound
relaxTimer.start();
double curNodeLowerBound;
if (disableFathoming) {
// If not fathoming, don't stop the relaxation early.
curNodeLowerBound = curNode.getOptimisticBound();
} else {
curNodeLowerBound = curNode.getOptimisticBound(incumbentScore);
}
RelaxedSolution relax = curNode.getRelaxedSolution();
relaxTimer.stop();
log.info(String.format("CurrentNode: id=%d depth=%d side=%d relaxScore=%f relaxStatus=%s incumbScore=%f avgNodeTime=%f", curNode.getId(),
curNode.getDepth(), curNode.getSide(), relax.getScore(), relax.getStatus().toString(), incumbentScore, Time.totMs(nodeTimer) / numProcessed));
if (curNodeLowerBound <= incumbentScore && !disableFathoming) {
// Fathom this node: it is either infeasible or was pruned.
if (relax.getStatus() == RelaxStatus.Infeasible) {
return new NodeResult(FathomStatus.Infeasible);
} else if (relax.getStatus() == RelaxStatus.Pruned) {
return new NodeResult(FathomStatus.Pruned);
} else {
log.warn("Unhandled status for relaxed solution: " + relax.getStatus() + " Treating as pruned.");
return new NodeResult(FathomStatus.Pruned);
}
}
// Check if the child node offers a better feasible solution
feasTimer.start();
Solution sol = curNode.getFeasibleSolution();
assert (sol == null || !Double.isNaN(sol.getScore()));
if (sol != null && sol.getScore() > incumbentScore) {
incumbentScore = sol.getScore();
incumbentSolution = sol;
evalIncumbent(incumbentSolution);
// TODO: pruneActiveNodes();
// We could store a priority queue in the opposite order (or
// just a sorted list)
// and remove nodes from it while their optimisticBound is
// worse than the
// new incumbentScore.
}
feasTimer.stop();
if (sol != null && Utilities.equals(sol.getScore(), relax.getScore(), 1e-13) && !disableFathoming) {
// Fathom this node: the optimal solution for this subproblem was found.
return new NodeResult(FathomStatus.CompletelySolved);
}
branchTimer.start();
List<ProblemNode> children = curNode.branch();
if (children.size() == 0) {
// Fathom this node: no more branches can be made.
return new NodeResult(FathomStatus.BottomedOut);
}
branchTimer.stop();
return new NodeResult(FathomStatus.NotFathomed, children);
}
private static double computeRelativeDiff(double upperBound, double lowerBound) {
// TODO: This is incorrect if the bounds are positive.
return Math.abs(upperBound - lowerBound) / Math.abs(lowerBound);
}
private void printSummary(double upperBound, double relativeDiff, int numProcessed, FathomStats fathom) {
int numFathomed = fathom.getNumFathomed();
log.info(String.format("Summary: upBound=%f lowBound=%f relativeDiff=%f #leaves=%d #fathom=%d #prune=%d #infeasible=%d avgFathomDepth=%.0f #seen=%d",
upperBound, incumbentScore, relativeDiff, leafNodePQ.size(), numFathomed, fathom.numPruned, fathom.numInfeasible, fathom.getAverageDepth(), numProcessed));
}
/**
* Override this method.
*/
protected void evalIncumbent(Solution incumbentSolution) {
return;
}
private boolean hasNextLeafNode() {
return !leafNodePQ.isEmpty();
}
private ProblemNode getNextLeafNode() {
ProblemNode node = leafNodePQ.remove();
upperBoundPQ.remove(node);
return node;
}
private void addToLeafNodes(ProblemNode node) {
leafNodePQ.add(node);
if (disableFathoming && upperBoundPQ.size() > 0) {
// This is a hack to ensure that we don't populate the upperBoundPQ.
return;
} else {
upperBoundPQ.add(node);
}
}
public Solution getIncumbentSolution() {
return incumbentSolution;
}
public double getIncumbentScore() {
return incumbentScore;
}
public void setDisableFathoming(boolean disableFathoming) {
this.disableFathoming = disableFathoming;
}
private void printSpaceRemaining(int numProcessed, double rootLogSpace, double logSpaceRemain) {
// Print stats about the space remaining.
log.info("Log space remaining (sub): " + logSpaceRemain);
// TODO: Maybe remove. This is slow and causes a NullPointerException.
// if (numProcessed % 2 == 0) {
// double logSpaceRemainAdd = computeLogSpaceRemain();
// log.info("Log space remaining (add): " + logSpaceRemainAdd);
// if (!Utilities.equals(logSpaceRemain, logSpaceRemainAdd, 1e-4)) {
// log.warn("Log space remaining differs between subtraction and addition versions.");
log.info("Space remaining: " + Utilities.exp(logSpaceRemain));
log.info("Proportion of root space remaining: " + Utilities.exp(logSpaceRemain - rootLogSpace));
}
protected void printTimers(int numProcessed) {
// Print timers.
log.debug("Avg time(ms) per node: " + Time.totMs(nodeTimer) / numProcessed);
log.debug("Avg switch time(ms) per node: " + Time.totMs(switchTimer) / numProcessed);
log.debug("Avg relax time(ms) per node: " + Time.totMs(relaxTimer) / numProcessed);
log.debug("Avg project time(ms) per node: " + Time.totMs(feasTimer) / numProcessed);
log.debug("Avg branch time(ms) per node: " + Time.totMs(branchTimer) / numProcessed);
}
private void printLeafNodeBoundHistogram() {
// Print Histogram
double[] bounds = new double[leafNodePQ.size()];
int i = 0;
for (ProblemNode node : leafNodePQ) {
bounds[i] = node.getOptimisticBound();
i++;
}
log.debug(getHistogram(bounds));
}
/**
* This VERY SLOWLY computes the log space remaining by
* adding up all the bounds of the leaf nodes.
*/
private double computeLogSpaceRemain() {
double logSpaceRemain = Double.NEGATIVE_INFINITY;
for (ProblemNode node : leafNodePQ) {
node.setAsActiveNode();
logSpaceRemain = Utilities.logAdd(logSpaceRemain, node.getLogSpace());
}
return logSpaceRemain;
}
private String getHistogram(double[] bounds) {
int numBins = 10;
double max = Vectors.max(bounds);
double min = Vectors.min(bounds);
double binWidth = (max - min) / numBins;
int[] hist = new int[numBins];
for (int i = 0; i < bounds.length; i++) {
int idx = (int) ((bounds[i] - min) / binWidth);
if (idx == hist.length) {
idx
}
hist[idx]++;
}
StringBuilder sb = new StringBuilder();
sb.append(String.format("histogram: min=%f max=%f\n", min, max));
for (int i=0; i<hist.length; i++) {
sb.append(String.format("\t[%.3f, %.3f) : %d\n", binWidth*i + min, binWidth*(i+1) + min, hist[i]));
}
return sb.toString();
}
} |
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package edu.wpi.first.wpilibj.templates;
import edu.wpi.first.wpilibj.*;
import edu.wpi.first.wpilibj.templates.Shooter;
import edu.wpi.first.wpilibj.templates.Team3373;
/**
*
* @author Philip2
*/
/**
 * Shooter variant used when shooting from underneath the target.
 * NOTE(review): relies on fields inherited from Shooter (shootA, shootB,
 * RPMModifier, ShooterSpeedScale, currentRPMT2, target, StageTwoTalon) --
 * their exact semantics are not visible here.
 */
public class Shooter_underneath extends Shooter {
/**
 * Sets the stage-two motor's target from the scaling input {@code a}.
 * shootA applies a positive RPM modifier, shootB a negative one; if
 * neither flag is set, the target and motor are left unchanged.
 */
public void RPMTarget(double a){ //defines target based on input. Appears to be better than speed increase. Can probably be used in place of a bunch of code.
if (shootA){
// target = (positive modifier * scale + current stage-2 RPM) scaled by a
target = ((RPMModifier *ShooterSpeedScale) + currentRPMT2) * a;
StageTwoTalon.set(target);
} else if (shootB){
// Same formula with the modifier negated.
target = (( -RPMModifier * ShooterSpeedScale) + currentRPMT2) * a;
StageTwoTalon.set(target);
}
}
}
package com.openxc;
import java.io.File;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import junit.framework.Assert;
import org.apache.commons.io.FileUtils;
import android.content.Intent;
import android.test.ServiceTestCase;
import com.openxc.measurements.HeadlampStatus;
import com.openxc.measurements.Measurement;
import com.openxc.measurements.ParkingBrakeStatus;
import com.openxc.remote.VehicleService;
import com.openxc.sources.trace.TraceVehicleDataSource;
/**
 * Android service test that plays a slow trace file through the
 * VehicleManager and counts how many HeadlampStatus and ParkingBrakeStatus
 * measurements reach registered listeners (i.e. that sporadic data is still
 * delivered).
 */
public class SporadicDataTest extends ServiceTestCase<VehicleManager> {
VehicleManager service;
// Incremented by the listeners below for each received measurement.
int headlampStatusCount = 0;
int parkingBrakeStatusCount = 0;
URI traceUri;
TraceVehicleDataSource source;
// Counts every headlamp status measurement delivered by the service.
HeadlampStatus.Listener headlampListener = new HeadlampStatus.Listener() {
public void receive(Measurement measurement) {
headlampStatusCount += 1;
}
};
// Counts every parking brake status measurement delivered by the service.
ParkingBrakeStatus.Listener parkingBrakeListener =
new ParkingBrakeStatus.Listener() {
public void receive(Measurement measurement) {
parkingBrakeStatusCount += 1;
}
};
public SporadicDataTest() {
super(VehicleManager.class);
}
/**
 * Copies a raw resource to a fixed path on the SD card and returns its
 * file URI; fails the test if the URI is malformed or the copy fails.
 */
private URI copyToStorage(int resource, String filename) {
URI uri = null;
try {
uri = new URI("file:///sdcard/com.openxc/" + filename);
} catch(URISyntaxException e) {
Assert.fail("Couldn't construct resource URIs: " + e);
}
try {
FileUtils.copyInputStreamToFile(
getContext().getResources().openRawResource(resource),
new File(uri));
} catch(IOException e) {
Assert.fail("Couldn't copy trace files to SD card" + e);
}
return uri;
}
// Stages the trace file, restarts the vehicle service fresh, registers
// both listeners, and attaches the trace data source.
@Override
protected void setUp() throws Exception {
super.setUp();
traceUri = copyToStorage(R.raw.slowtrace, "slowtrace.json");
// if the service is already running (and thus may have old data
// cached), kill it.
getContext().stopService(new Intent(getContext(), VehicleService.class));
Intent startIntent = new Intent();
startIntent.setClass(getContext(), VehicleManager.class);
service = ((VehicleManager.VehicleBinder)
bindService(startIntent)).getService();
service.waitUntilBound();
service.addListener(HeadlampStatus.class, headlampListener);
service.addListener(ParkingBrakeStatus.class, parkingBrakeListener);
// NOTE(review): the third constructor argument presumably disables
// looping the trace -- confirm against TraceVehicleDataSource.
source = new TraceVehicleDataSource(getContext(), traceUri, false);
service.addSource(source);
}
// Stops trace playback (if it started) before tearing down the service.
@Override
protected void tearDown() throws Exception {
if(source != null) {
source.stop();
}
super.tearDown();
}
}
package util.xhtml;
import java.awt.Dimension;
import java.awt.Graphics2D;
import java.awt.image.BufferedImage;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.StringReader;
import javax.imageio.ImageIO;
import javax.swing.JPanel;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.stream.StreamResult;
import javax.xml.transform.stream.StreamSource;
import org.w3c.dom.Document;
import org.w3c.tidy.Tidy;
import org.xhtmlrenderer.extend.UserAgentCallback;
import org.xhtmlrenderer.resource.ImageResource;
import org.xhtmlrenderer.resource.XMLResource;
import org.xhtmlrenderer.simple.Graphics2DRenderer;
import org.xhtmlrenderer.swing.NaiveUserAgent;
import org.xml.sax.InputSource;
import util.configuration.LocalResourceLoader;
import util.files.FileUtils;
/**
* A mess of code which is responsible for generating a graphical rendering of a game
* @author Ethan
*
*/
@SuppressWarnings("serial")
public class GameStateRenderPanel extends JPanel {
private static final Dimension defaultSize = new Dimension(600,600);
public static Dimension getDefaultSize()
{
return defaultSize;
}
public static void renderImagefromGameXML(String gameXML, String XSL, boolean isLocalVisualization, BufferedImage backimage)
{
Graphics2DRenderer r = new Graphics2DRenderer();
if (isLocalVisualization) {
r.getSharedContext().setUserAgentCallback(getUAC());
}
String xhtml = getXHTMLfromGameXML(gameXML, XSL);
xhtml = xhtml.replace("<?xml version=\"1.0\" encoding=\"UTF-8\"?>", "");
InputSource is = new InputSource(new BufferedReader(new StringReader(xhtml)));
Document dom = XMLResource.load(is).getDocument();
r.setDocument(dom, "http:
final Graphics2D g2 = backimage.createGraphics();
r.layout(g2, defaultSize);
r.render(g2);
}
private static String getXHTMLfromGameXML(String gameXML, String XSL) {
XSL = XSL.replace("<!DOCTYPE stylesheet [<!ENTITY ROOT \"http://games.ggp.org\">]>", "");
XSL = XSL.replace("&ROOT;", "http://games.ggp.org").trim();
IOString game = new IOString(gameXML);
IOString xslIOString = new IOString(XSL);
IOString content = new IOString("");
try {
TransformerFactory tFactory = TransformerFactory.newInstance();
Transformer transformer = tFactory.newTransformer(new StreamSource(xslIOString.getInputStream()));
transformer.transform(new StreamSource(game.getInputStream()),
new StreamResult(content.getOutputStream()));
} catch (Exception ex) {
ex.printStackTrace();
}
Tidy tidy = new Tidy();
tidy.setXHTML(true);
tidy.setShowWarnings(false);
tidy.setQuiet(true);
tidy.setDropEmptyParas(false);
IOString tidied = new IOString("");
tidy.parse(content.getInputStream(), tidied.getOutputStream());
return tidied.getString();
}
// Sharing UACs would probably help reduce resource usage,
// but I'm not sure about thread-safety of UAC (it seemed not to be).
private static UserAgentCallback getUAC() {
return getNewUAC();
}
/**
 * Builds a {@link UserAgentCallback} whose image loading is redirected to
 * files under the local {@code games/images} directory instead of the
 * network, caching each decoded image in the agent's {@code _imageCache}.
 */
private static UserAgentCallback getNewUAC() {
    return new NaiveUserAgent() {
        //TODO: implement this with nio.
        @SuppressWarnings("unchecked")
        // TODO: _imageCache should be templatized properly so that warnings don't need to be suppressed
        @Override
        public ImageResource getImageResource(String uri)
        {
            ImageResource ir;
            uri = resolveURI(uri);
            // Fast path: image already decoded and cached under this URI.
            ir = (ImageResource) _imageCache.get(uri);
            if (ir != null) {
                return ir;
            }
            // Couldn't load image from cache: need to fetch original source.
            InputStream is = null;
            // NOTE(review): this string literal appears truncated (likely by a
            // comment-stripping pass eating everything after "//"); it was
            // presumably a full "http://..." prefix — restore from history.
            String expectedPrefix = "http:
            if (!uri.startsWith(expectedPrefix)) {
                System.err.println("Unexpected prefix for image URI: " + uri);
                // Unknown host/prefix: return a stub resource rather than fail.
                return createImageResource(uri, null);
            }
            // Map the remote URI onto the local games/images mirror.
            File localImg = new File(new File("games", "images"), uri.replace(expectedPrefix, ""));
            // Ensure the image is present on the local disk.
            boolean presentLocally = localImg.exists();
            if (!presentLocally) {
                return createImageResource(uri, null);
            }
            // Open a stream from the file on the local disk.
            try {
                is = new FileInputStream(localImg);
            } catch(Exception ex) {
                ex.printStackTrace();
            }
            if (is == null) {
                // Stream could not be opened; fall back to a stub resource.
                return createImageResource(uri, null);
            }
            // Read the image from the stream.
            try {
                BufferedImage img = ImageIO.read(is);
                if (img == null) {
                    // ImageIO returns null for unsupported formats; surface it
                    // as an IOException so the catch below logs it uniformly.
                    System.err.println("ImageIO.read() returned null");
                    throw new IOException("ImageIO.read() returned null");
                }
                ir = createImageResource(uri, img);
                // Cache the decoded image for subsequent lookups.
                _imageCache.put(uri, ir);
            } catch (FileNotFoundException e) {
                System.err.println("Can't read image file; image at URI '" + uri + "' not found");
            } catch (IOException e) {
                System.err.println("Can't read image file; unexpected problem for URI '" + uri + "': " + e);
            } finally {
                try {
                    is.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
            if (ir != null) {
                return ir;
            }
            // Couldn't fetch original image source; need to create stub.
            return createImageResource(uri, null);
        }
    };
}
// Code to pull in XSL from local stylesheets
/**
 * Loads the game-specific stylesheet for {@code theGameKey} and splices it
 * into the shared src/util/xhtml/template.xsl template.
 *
 * NOTE(review): the return line below is truncated (the tail of the line —
 * the replace() target/replacement and closing ");" — looks to have been
 * eaten by a comment-stripping pass); restore it from version history.
 */
public static String getXSLfromFile(String theGameKey) {
    File templateFile = new File(new File(new File("src", "util"), "xhtml"), "template.xsl");
    String XSL = LocalResourceLoader.loadStylesheet(theGameKey);
    String template = FileUtils.readFileAsString(templateFile);
    return template.replace("
}
/**
 * In-memory string buffer exposed through stream interfaces, so string
 * content can be fed to APIs (XSLT, JTidy) that only speak streams.
 * The input and output streams returned here all share one underlying
 * {@link StringBuffer}; not safe for concurrent use.
 */
private static class IOString
{
    private StringBuffer data;

    /** Seeds the buffer with the given initial content. */
    public IOString(String s) {
        data = new StringBuffer(s);
    }

    /** Returns the current buffer contents as a string. */
    public String getString() {
        return data.toString();
    }

    /** Returns a stream that reads the buffer from the beginning. */
    public InputStream getInputStream() {
        return new IOString.IOStringInputStream();
    }

    /** Returns a stream that appends written bytes onto the buffer. */
    public OutputStream getOutputStream() {
        return new IOString.IOStringOutputStream();
    }

    /** Reads the shared buffer one char at a time; -1 once exhausted. */
    class IOStringInputStream extends java.io.InputStream {
        private int cursor = 0;
        public int read() throws java.io.IOException
        {
            return cursor < data.length() ? data.charAt(cursor++) : -1;
        }
    }

    /** Appends each written byte to the shared buffer as a char. */
    class IOStringOutputStream extends java.io.OutputStream {
        public void write(int character) throws java.io.IOException {
            data.append((char) character);
        }
    }
}
} |
package projecteuler.problem;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.junit.Assert;
import projecteuler.ProblemTemplate;
public class Problem30 extends ProblemTemplate {
@Override
public String getTitle() {
return "Digit fifth powers";
}
@Override
public String getResult() {
Assert.assertTrue(validNumber(1634, Arrays.asList(1, 6, 3, 4)));
Assert.assertTrue(validNumber(1634, Arrays.asList(6, 1, 3, 4)));
Assert.assertFalse(validNumber(1634, Arrays.asList(9, 1, 3, 4)));
Assert.assertTrue(validNumber(8208, Arrays.asList(8, 2, 0, 8)));
Assert.assertFalse(validNumber(4434, Arrays.asList(1, 3, 4, 8)));
Assert.assertEquals(19316, getSumOfNum2(4));
return String.valueOf(getSumOfNum2(5));
}
private int getSumOfNum2(int pow) {
int total = 0;
int begin = (int) (Math.pow(10, pow - 1));
int end = (int) (Math.pow(10, pow) - 1);
while (begin++ <= end) {
List<Integer> numbers = split(begin);
int sumOfPowers = 0;
for (Integer i : numbers) {
sumOfPowers += Math.pow(i, pow);
}
if (sumOfPowers == begin) {
System.out.println(sumOfPowers + " " + numbers + " " + pow);
total += sumOfPowers;
}
}
return total;
}
private List<Integer> split(int begin) {
List<Integer> list = new ArrayList<>();
int x = begin;
while (x != 0) {
list.add(x % 10);
x = x / 10;
}
return list;
}
@SuppressWarnings("unused")
private int getSumOfNum(int pow) {
int[] array = { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 };
int[] code = { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 };
int total = 0;
while (next(code)) {
List<Integer> combination = getCombination(array, code);
if (combination.size() == 1) {
continue;
}
int sumOfPowers = 0;
for (Integer i : combination) {
sumOfPowers += Math.pow(i, pow);
}
if (validNumber(sumOfPowers, combination)) {
System.out.println(combination + " " + sumOfPowers);
total += sumOfPowers;
}
}
throw new UnsupportedOperationException("Incorrect implementation");
}
private List<Integer> getCombination(int[] array, int[] code) {
List<Integer> combination = new ArrayList<>(array.length);
for (int i = 0; i < code.length; i++) {
if (code[i] != 0) {
combination.add(array[i]);
}
}
return combination;
}
private boolean next(int[] code) {
for (int i = code.length - 1; i >= 0; i
if (code[i] != 0) {
continue;
}
for (int j = i - 1; j >= 0; j
if (code[j] == 1) {
code[j] = 0;
code[j + 1] = 1;
move1ToHead(code, j + 2);
return true;
}
}
code[0] = 1;
move1ToHead(code, 1);
return true;
}
return false;
}
private void move1ToHead(int[] code, int range) {
for (int i = code.length - 1; i > range; i
if (code[i] == 0) {
continue;
}
for (int j = range; j < i; j++) {
if (code[j] == 0) {
code[j] = 1;
code[i] = 0;
break;
}
}
}
}
private boolean validNumber(int n, List<Integer> list) {
int _n = n;
List<Integer> _list = new ArrayList<>(list);
int c = 0;
int size = _list.size();
while (_n > 0) {
int y = _n - _n / 10 * 10;
_n = _n / 10;
c++;
if (!_list.remove(Integer.valueOf(y))) {
return false;
}
}
return c == size;
}
} |
package com.psddev.dari.util;
import java.io.ByteArrayOutputStream;
import java.io.Closeable;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URL;
import java.net.URLConnection;
import java.nio.charset.Charset;
import java.nio.charset.IllegalCharsetNameException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** IO utility methods. */
public final class IoUtils {
private static final int BUFFER_SIZE = 0x1000;
private static final Logger LOGGER = LoggerFactory.getLogger(IoUtils.class);
/**
* Closes the given {@code closeable}.
*
* @param closeable If {@code null}, does nothing.
* @param suppressError If {@code true}, logs the error instead of throwing it.
*/
public static void close(Closeable closeable, boolean suppressError) throws IOException {
if (closeable == null) {
return;
}
try {
closeable.close();
} catch (IOException error) {
if (suppressError) {
if (LOGGER.isWarnEnabled()) {
LOGGER.warn("Can't close [" + closeable + "]!", error);
}
} else {
throw error;
}
}
}
/**
* Closes the given {@code closeable}, logging any errors that occur
* instead of throwing them.
*
* @param closeable If {@code null}, does nothing.
*/
public static void closeQuietly(Closeable closeable) {
try {
close(closeable, true);
} catch (IOException error) {
// This should never trigger with #close(suppressError = true).
}
}
/**
* Copies from the given {@code source} to the given {@code destination}.
* Doesn't close either streams.
*
* @param source Can't be {@code null}.
* @param destination Can't be {@code null}.
* @return Number of bytes copied.
*/
public static long copy(InputStream source, OutputStream destination) throws IOException {
byte[] buffer = new byte[BUFFER_SIZE];
long total = 0L;
for (int read; (read = source.read(buffer)) > -1; ) {
destination.write(buffer, 0, read);
total += read;
}
return total;
}
/**
* Copies the given {@code source} to the given {@code destination}.
*
* @param source Can't be {@code null}.
* @param destination Can't be {@code null}.
* @return Number of bytes copied.
*/
public static long copy(File source, File destination) throws IOException {
createFile(destination);
FileInputStream sourceInput = new FileInputStream(source);
try {
FileOutputStream destinationOutput = new FileOutputStream(destination);
try {
return copy(sourceInput, destinationOutput);
} finally {
destinationOutput.close();
}
} finally {
sourceInput.close();
}
}
/**
* Returns all bytes from the given {@code input}. Doesn't close
* the stream.
*
* @param input Can't be {@code null}.
* @return Never {@code null}.
*/
public static byte[] toByteArray(InputStream input) throws IOException {
ByteArrayOutputStream output = new ByteArrayOutputStream();
copy(input, output);
return output.toByteArray();
}
/**
* Returns all bytes from the given {@code file}.
*
* @param file Can't be {@code null}.
* @return Never {@code null}.
*/
public static byte[] toByteArray(File file) throws IOException {
InputStream input = new FileInputStream(file);
try {
return toByteArray(input);
} finally {
closeQuietly(input);
}
}
/**
* Returns all bytes from the given {@code url}.
*
* @param url Can't be {@code null}.
* @return Never {@code null}.
*/
public static byte[] toByteArray(URL url) throws IOException {
InputStream input = url.openStream();
try {
return toByteArray(input);
} finally {
closeQuietly(input);
}
}
/**
* Returns a file equivalent to the given {@code url}.
*
* @param url Can be {@code null}.
* @param charset Can't be {@code null}.
* @return {@code null} if the given {@code url} is {@code null} or
* doesn't point to a file.
*/
public static File toFile(URL url, Charset charset) {
if (url == null || !"file".equalsIgnoreCase(url.getProtocol())) {
return null;
}
byte[] encoded = url.getFile().replace('/', File.separatorChar).getBytes(StringUtils.US_ASCII);
int length = encoded.length;
byte[] decoded = new byte[length];
int decodedIndex = 0;
for (int i = 0; i < length; ++ i) {
byte letter = encoded[i];
if (letter == '%') {
++ i;
if (i < length) {
byte hex1 = HEX_TO_BYTE[encoded[i]];
if (hex1 >= 0) {
++ i;
if (i < length) {
byte hex2 = HEX_TO_BYTE[encoded[i]];
if (hex2 >= 0) {
decoded[decodedIndex] = (byte) (hex1 << 4 | hex2);
++ decodedIndex;
continue;
}
}
}
}
}
decoded[decodedIndex] = letter;
++ decodedIndex;
}
return new File(new String(decoded, 0, decodedIndex, charset));
}
private static final byte[] HEX_TO_BYTE;
static {
int length = Byte.MAX_VALUE - Byte.MIN_VALUE;
HEX_TO_BYTE = new byte[length];
for (int i = 0; i < length; ++ i) {
HEX_TO_BYTE[i] = -1;
}
for (int i = 0; i < 0x10; ++ i) {
HEX_TO_BYTE[Integer.toHexString(i).charAt(0)] = (byte) i;
}
}
/**
* Reads all bytes from the given {@code input} and converts them
* into a string using the given {@code charset}.
*
* @param input Can't be {@code null}.
* @param charset Can't be {@code null}.
* @return Never {@code null}.
*/
public static String toString(InputStream input, Charset charset) throws IOException {
return new String(toByteArray(input), charset);
}
/**
* Reads all bytes from the given {@code file} and converts them
* into a string using the given {@code charset}.
*
* @param file Can't be {@code null}.
* @param charset Can't be {@code null}.
* @return Never {@code null}.
*/
public static String toString(File file, Charset charset) throws IOException {
return new String(toByteArray(file), charset);
}
/**
* Reads all bytes from the given {@code url} and converts them
* into a string using the response content encoding. If the encoding
* isn't provided, uses {@link StringUtils#UTF_8} instead.
*
* @param url Can't be {@code null}.
* @return Never {@code null}.
*/
public static String toString(URL url, int millis) throws IOException {
URLConnection connection = url.openConnection();
if (millis > 0) {
connection.setConnectTimeout(millis);
connection.setReadTimeout(millis);
}
InputStream input = connection.getInputStream();
try {
String encoding = connection.getContentEncoding();
Charset charset;
if (encoding == null) {
charset = StringUtils.UTF_8;
} else {
try {
charset = Charset.forName(encoding);
} catch (IllegalCharsetNameException error) {
throw new IOException(error);
}
}
return new String(toByteArray(input), charset);
} finally {
closeQuietly(input);
}
}
public static String toString(URL url) throws IOException {
return toString(url, -1);
}
/**
* Creates all directories leading up to and including the given
* {@code directory} if any of them doesn't exist.
*
* @param directory Can't be {@code null}.
* @throws IOException If any of the directories couldn't be created.
*/
public static void createDirectories(File directory) throws IOException {
if (directory.exists() && !directory.isDirectory()) {
throw new IOException("[" + directory + "] already exists but isn't a directory!");
} else if (!directory.mkdirs() && !directory.isDirectory()) {
throw new IOException("Can't create [" + directory + "] directory!");
}
}
/**
* Creates all the parent directories leading up to the given
* {@code fileOrDirectory} if any of them doesn't exist.
*
* @param fileOrDirectory Can't be {@code null}.
* @throws IOException If any of the parent directories couldn't be
* created.
*/
public static void createParentDirectories(File fileOrDirectory) throws IOException {
createDirectories(fileOrDirectory.getParentFile());
}
/**
* Creates the given {@code file} if it doesn't exist. This method will
* also create all the parent directories leading up to the given
* {@code file} using {@link #createParentDirectories}.
*
* @param file Can't be {@code null}.
* @throws IOException If the given {@code file} couldn't be created.
*/
public static void createFile(File file) throws IOException {
createParentDirectories(file);
if (!file.createNewFile() &&
!file.isFile()) {
throw new IOException("[" + file + "] already exists but isn't a file!");
}
}
/**
* Renames the given {@code source} to {@code destination}.
*
* @param source Can't be {@code null}.
* @param destination Can't be {@code null}.
* @throws IOException If the given {@code source} couldn't be renamed.
*/
public static void rename(File source, File destination) throws IOException {
if (!source.renameTo(destination)) {
throw new IOException("[" + source + "] can't be renamed to [" + destination + "]!");
}
}
/**
* Deletes the given {@code fileOrDirectory} if it exists.
*
* @param fileOrDirectory If {@code null}, does nothing.
* @throws IOException If the given {@code file} couldn't be deleted.
*/
public static void delete(File fileOrDirectory) throws IOException {
if (fileOrDirectory != null &&
fileOrDirectory.exists() &&
!fileOrDirectory.delete() &&
fileOrDirectory.exists()) {
throw new IOException("Can't delete [" + fileOrDirectory + "]!");
}
}
} |
package com.castlabs.dash.dashfragmenter.sequences;
import com.castlabs.dash.dashfragmenter.ExitCodeException;
import com.castlabs.dash.dashfragmenter.formats.csf.DashBuilder;
import com.castlabs.dash.dashfragmenter.formats.csf.SegmentBaseSingleSidxManifestWriterImpl;
import com.castlabs.dash.dashfragmenter.formats.csf.WrappingTrack;
import com.castlabs.dash.dashfragmenter.formats.multiplefilessegementtemplate.ExplodedSegmentListManifestWriterImpl;
import com.castlabs.dash.dashfragmenter.formats.multiplefilessegementtemplate.SingleSidxExplode;
import com.coremedia.iso.boxes.Box;
import com.coremedia.iso.boxes.CompositionTimeToSample;
import com.coremedia.iso.boxes.Container;
import com.coremedia.iso.boxes.SampleDescriptionBox;
import com.coremedia.iso.boxes.sampleentry.AudioSampleEntry;
import com.googlecode.mp4parser.FileDataSourceImpl;
import com.googlecode.mp4parser.authoring.*;
import com.googlecode.mp4parser.authoring.builder.*;
import com.googlecode.mp4parser.authoring.container.mp4.MovieCreator;
import com.googlecode.mp4parser.authoring.tracks.*;
import com.googlecode.mp4parser.boxes.mp4.ESDescriptorBox;
import com.googlecode.mp4parser.boxes.mp4.objectdescriptors.AudioSpecificConfig;
import com.googlecode.mp4parser.util.Path;
import mpegDashSchemaMpd2011.MPDDocument;
import org.apache.xmlbeans.XmlOptions;
import javax.crypto.SecretKey;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.channels.WritableByteChannel;
import java.security.cert.X509Certificate;
import java.util.*;
import java.util.logging.Logger;
public class DashFileSetSequence {
// Logger injected via setLogger(); code assumes it is set before run().
Logger l;
// Codecs this fragmenter knows how to segment; anything else is skipped.
static Set<String> supportedTypes = new HashSet<String>(Arrays.asList("ac-3", "ec-3", "dtsl", "dtsh", "dtse", "avc1", "mp4a", "h264"));
// CENC content key; encryption only happens when both key and keyid are set.
protected SecretKey key;
// CENC key id paired with the key above.
protected UUID keyid;
// NOTE(review): certificates are stored but not read anywhere in this view.
protected List<X509Certificate> certificates;
// Source media files to fragment.
protected List<File> inputFiles;
// Target directory; defaults to the current working directory.
protected File outputDirectory = new File("");
// false: single-file (SIDX) output; true: one file per segment.
protected boolean explode = false;
/** Sets the CENC content key; encryption requires {@link #setKeyid} too. */
public void setKey(SecretKey key) {
    this.key = key;
}
/** Sets the CENC key id; encryption requires {@link #setKey} too. */
public void setKeyid(UUID keyid) {
    this.keyid = keyid;
}
/** Sets the certificates (currently only stored). */
public void setCertificates(List<X509Certificate> certificates) {
    this.certificates = certificates;
}
/** Sets the source media files to fragment. */
public void setInputFiles(List<File> inputFiles) {
    this.inputFiles = inputFiles;
}
/** Sets the directory all output is written into. */
public void setOutputDirectory(File outputDirectory) {
    this.outputDirectory = outputDirectory;
}
/** true writes one file per segment; false writes single-SIDX files. */
public void setExplode(boolean explode) {
    this.explode = explode;
}
/** Sets the logger; must be called before {@link #run()}. */
public void setLogger(Logger l) {
    this.l = l;
}
/**
 * Full pipeline: load tracks, normalize edit lists, fix Apple-specific
 * sample descriptions, optionally encrypt, group/sort tracks, fragment,
 * and write either single-SIDX files or exploded segments plus an MPD.
 *
 * @return process exit code (always 0 when no exception is thrown)
 */
public int run() throws IOException, ExitCodeException {
    // XOR idiom: exactly one of exists()/mkdirs() should be true — either
    // the directory already existed or it was just created. Both false
    // means creation failed; note execution still continues after the log.
    if (!(outputDirectory.getAbsoluteFile().exists() ^ outputDirectory.getAbsoluteFile().mkdirs())) {
        l.severe("Output directory does not exist and cannot be created.");
    }
    long start = System.currentTimeMillis();
    Map<Track, String> track2File = createTracks();
    // Each step rewraps tracks, keeping the track -> source-file mapping.
    track2File = alignEditsToZero(track2File);
    track2File = fixAppleOddity(track2File);
    track2File = encryptTracks(track2File);
    // sort by language and codec
    Map<String, List<Track>> trackFamilies = findTrackFamilies(track2File.keySet());
    // Track sizes are expensive to calculate -> save them for later
    Map<Track, Long> trackSizes = calculateTrackSizes(trackFamilies);
    // sort within the track families by size to get stable output
    sortTrackFamilies(trackFamilies, trackSizes);
    // calculate the fragment start samples once & save them for later
    Map<Track, long[]> trackStartSamples = findFragmentStartSamples(trackFamilies);
    // calculate bitrates
    Map<Track, Long> trackBitrate = calculateBitrate(trackFamilies, trackSizes);
    // generate filenames for later reference
    Map<Track, String> trackFilename = generateFilenames(track2File);
    // export the dashed single track MP4s
    Map<Track, Container> dashedFiles = createSingleTrackDashedMp4s(trackStartSamples, trackFilename);
    if (!explode) {
        writeFilesSingleSidx(trackFilename, dashedFiles);
        writeManifestSingleSidx(trackFamilies, trackBitrate, trackFilename, dashedFiles);
    } else {
        String mediaPattern = "$RepresentationID$/media-$Time$.mp4";
        String initPattern = "$RepresentationID$/init.mp4";
        Map<Track, List<File>> trackToSegments =
                writeFilesExploded(trackFilename, dashedFiles, trackBitrate, outputDirectory, initPattern, mediaPattern);
        writeManifestExploded(trackFamilies, trackBitrate, trackFilename, dashedFiles, trackToSegments, outputDirectory, initPattern, mediaPattern);
    }
    l.info("Finished write in " + (System.currentTimeMillis() - start) + "ms");
    return 0;
}
/**
 * Writes each dashed track as an exploded segment tree (one directory per
 * representation holding init + media segment files).
 *
 * @return map from track to the segment files written for it
 */
public Map<Track, List<File>> writeFilesExploded(
        Map<Track, String> trackFilename,
        Map<Track, Container> dashedFiles,
        Map<Track, Long> trackBitrate,
        File outputDirectory,
        String initPattern,
        String mediaPattern) throws IOException {
    Map<Track, List<File>> trackToSegments = new HashMap<Track, List<File>>();
    for (Map.Entry<Track, String> entry : trackFilename.entrySet()) {
        Track track = entry.getKey();
        String filename = entry.getValue();
        l.info("Writing... " + filename + "/...");
        File targetDir = new File(outputDirectory, filename);
        // XOR: either the directory already existed or it was just created.
        if (!(targetDir.getAbsoluteFile().exists() ^ targetDir.getAbsoluteFile().mkdirs())) {
            l.severe("target directory " + targetDir + " does not exist and cannot be created.");
        }
        SingleSidxExplode singleSidxExplode = new SingleSidxExplode();
        List<File> segments = new ArrayList<File>();
        singleSidxExplode.doIt(
                filename,
                dashedFiles.get(track),
                trackBitrate.get(track), segments, outputDirectory, initPattern, mediaPattern);
        l.info("Done.");
        trackToSegments.put(track, segments);
    }
    return trackToSegments;
}
/**
 * Writes each dashed track container into a single MP4 file in the output
 * directory, streaming box by box.
 */
public void writeFilesSingleSidx(Map<Track, String> trackFilename, Map<Track, Container> dashedFiles) throws IOException {
    for (Map.Entry<Track, Container> trackContainerEntry : dashedFiles.entrySet()) {
        l.info("Writing... ");
        File target = new File(outputDirectory, trackFilename.get(trackContainerEntry.getKey()));
        WritableByteChannel wbc = new FileOutputStream(target).getChannel();
        try {
            List<Box> boxes = trackContainerEntry.getValue().getBoxes();
            for (int i = 0; i < boxes.size(); i++) {
                Box box = boxes.get(i);
                l.fine("Writing... " + box.getType() + " [" + i + " of " + boxes.size() + "]");
                box.getBox(wbc);
            }
        } finally {
            // Closing the channel also closes the underlying stream.
            wbc.close();
        }
        l.info("Done.");
    }
}
/**
 * Builder factory hook; override to customize how fragmented MP4s are
 * assembled. The movie argument is unused by the default implementation.
 */
public Mp4Builder getFileBuilder(FragmentIntersectionFinder fragmentIntersectionFinder, Movie m) {
    final DashBuilder builder = new DashBuilder();
    builder.setIntersectionFinder(fragmentIntersectionFinder);
    return builder;
}
/**
 * Builds one fragmented (dashed) MP4 container per track, each containing
 * only that single track, fragmented at the precomputed start samples.
 *
 * @return map from track to its in-memory dashed container
 */
public Map<Track, Container> createSingleTrackDashedMp4s(
        Map<Track, long[]> fragmentStartSamples,
        Map<Track, String> filenames) throws IOException {
    HashMap<Track, Container> containers = new HashMap<Track, Container>();
    for (final Map.Entry<Track, long[]> trackEntry : fragmentStartSamples.entrySet()) {
        Track track = trackEntry.getKey();
        // One single-track movie per representation.
        Movie movie = new Movie();
        movie.addTrack(track);
        l.info("Creating model for " + filenames.get(track) + "... ");
        Mp4Builder builder = getFileBuilder(
                new StaticFragmentIntersectionFinderImpl(fragmentStartSamples),
                movie);
        containers.put(track, builder.build(movie));
    }
    return containers;
}
/**
 * Sorts the tracks inside each family by ascending (approximate) size so
 * the generated output is stable across runs.
 *
 * @param trackFamilies tracks grouped by family; each list is sorted in place
 * @param sizes approximate size per track (see calculateTrackSizes)
 */
public void sortTrackFamilies(Map<String, List<Track>> trackFamilies, final Map<Track, Long> sizes) {
    for (List<Track> tracks : trackFamilies.values()) {
        Collections.sort(tracks, new Comparator<Track>() {
            public int compare(Track o1, Track o2) {
                // Bug fix: the old "(int) (a - b)" truncated/overflowed the
                // long difference, which can order large tracks incorrectly.
                return sizes.get(o1).compareTo(sizes.get(o2));
            }
        });
    }
}
/**
 * Calculates approximate track size suitable for sorting & calculating bitrate but not suitable
 * for precise calculations.
 *
 * <p>Samples the first 10000 sample sizes and extrapolates the average to
 * the whole track.</p>
 *
 * @param trackFamilies all tracks grouped by their type.
 * @return map from track to track's size (0 for a track with no samples)
 */
public Map<Track, Long> calculateTrackSizes(Map<String, List<Track>> trackFamilies) {
    HashMap<Track, Long> sizes = new HashMap<Track, Long>();
    for (List<Track> tracks : trackFamilies.values()) {
        for (Track track : tracks) {
            long size = 0;
            List<Sample> samples = track.getSamples();
            int sampled = Math.min(samples.size(), 10000);
            for (int i = 0; i < sampled; i++) {
                size += samples.get(i).getSize();
            }
            // Bug fix: guard against division by zero for an empty track
            // (previously Math.min(0, 10000) == 0 was used as the divisor).
            if (sampled > 0) {
                size = (size / sampled) * samples.size();
            }
            sizes.put(track, size);
        }
    }
    return sizes;
}
/**
 * Calculates bitrate from sizes.
 *
 * @param trackFamilies all tracks grouped by their type.
 * @param trackSize size per track
 * @return bitrate per track, in bits per second rounded down to whole kbps
 */
public Map<Track, Long> calculateBitrate(Map<String, List<Track>> trackFamilies, Map<Track, Long> trackSize) {
    HashMap<Track, Long> bitrates = new HashMap<Track, Long>();
    for (List<Track> familyTracks : trackFamilies.values()) {
        for (Track track : familyTracks) {
            double durationSeconds = (double) track.getDuration() / track.getTrackMetaData().getTimescale();
            long bytes = trackSize.get(track);
            // bits / seconds, truncated to kbps granularity.
            long bitrate = (long) (bytes * 8 / durationSeconds / 1000) * 1000;
            bitrates.put(track, bitrate);
        }
    }
    return bitrates;
}
/**
 * Generates filenames from type, language and bitrate.
 *
 * <p>Strips the known media extensions from each source filename; when
 * several tracks originate from the same file, the track id is appended to
 * keep names unique. In non-exploded mode an ".mp4" suffix is added.</p>
 *
 * @return a descriptive filename <code>type[-lang]-bitrate.mp4</code>
 */
public Map<Track, String> generateFilenames(Map<Track, String> trackOriginalFilename) {
    HashMap<Track, String> filenames = new HashMap<Track, String>();
    for (Map.Entry<Track, String> entry : trackOriginalFilename.entrySet()) {
        Track track = entry.getKey();
        String baseName = entry.getValue()
                .replace(".mp4", "")
                .replace(".mov", "")
                .replace(".aac", "")
                .replace(".ec3", "")
                .replace(".ac3", "")
                .replace(".dtshd", "");
        for (Track earlier : filenames.keySet()) {
            if (earlier != track &&
                    trackOriginalFilename.get(earlier).equals(trackOriginalFilename.get(track))) {
                // ouch multiple tracks point to same file
                baseName += "_" + track.getTrackMetaData().getTrackId();
            }
        }
        if (!explode) {
            filenames.put(track, String.format("%s.mp4", baseName));
        } else {
            filenames.put(track, baseName);
        }
    }
    return filenames;
}
/**
 * Determines fragment boundaries per track: video tracks fragment on sync
 * samples (shared across the family), audio tracks on ~2 second chunks.
 * Any other handler type is rejected.
 *
 * @return map from track to the 1-based sample numbers starting each fragment
 */
public Map<Track, long[]> findFragmentStartSamples(Map<String, List<Track>> trackFamilies) {
    Map<Track, long[]> fragmentStartSamples = new HashMap<Track, long[]>();
    for (Map.Entry<String, List<Track>> familyEntry : trackFamilies.entrySet()) {
        String key = familyEntry.getKey();
        List<Track> tracks = familyEntry.getValue();
        Movie movie = new Movie();
        movie.setTracks(tracks);
        for (Track track : tracks) {
            String handler = track.getHandler();
            if (handler.startsWith("vide")) {
                FragmentIntersectionFinder videoIntersectionFinder = new SyncSampleIntersectFinderImpl(movie, null, 2);
                fragmentStartSamples.put(track, videoIntersectionFinder.sampleNumbers(track));
            } else if (handler.startsWith("soun")) {
                FragmentIntersectionFinder soundIntersectionFinder = new TwoSecondIntersectionFinder(movie, 5);
                fragmentStartSamples.put(track, soundIntersectionFinder.sampleNumbers(track));
            } else {
                throw new RuntimeException("An engineer needs to tell me if " + key + " is audio or video!");
            }
        }
    }
    return fragmentStartSamples;
}
/**
 * Creates a Map with Track as key and originating filename as value.
 *
 * <p>Container files (.mp4/.mov/.m4v) are demuxed into their tracks;
 * elementary-stream files (.aac/.h264/.ac3/.ec3/.dtshd) become one track
 * each. Unknown extensions abort the run.</p>
 *
 * @return Track too originating file map
 * @throws IOException if a source file cannot be read
 * @throws ExitCodeException if a file's type cannot be identified
 */
public Map<Track, String> createTracks() throws IOException, ExitCodeException {
    Map<Track, String> track2File = new HashMap<Track, String>();
    for (File inputFile : inputFiles) {
        if (inputFile.getName().endsWith(".mp4") ||
                inputFile.getName().endsWith(".mov") ||
                inputFile.getName().endsWith(".m4v")) {
            Movie movie = MovieCreator.build(new FileDataSourceImpl(inputFile));
            for (Track track : movie.getTracks()) {
                String codec = track.getSampleDescriptionBox().getSampleEntry().getType();
                if (!supportedTypes.contains(codec)) {
                    l.warning("Excluding " + inputFile + " track " + track.getTrackMetaData().getTrackId() + " as its codec " + codec + " is not yet supported");
                    // NOTE(review): "break" also drops all remaining tracks of
                    // this file, even supported ones; the log message reads
                    // per-track — confirm whether "continue" was intended.
                    break;
                }
                track2File.put(track, inputFile.getName());
            }
        } else if (inputFile.getName().endsWith(".aac")) {
            Track track = new AACTrackImpl(new FileDataSourceImpl(inputFile));
            track2File.put(track, inputFile.getName());
            l.fine("Created AAC Track from " + inputFile.getName());
        } else if (inputFile.getName().endsWith(".h264")) {
            Track track = new H264TrackImpl(new FileDataSourceImpl(inputFile));
            track2File.put(track, inputFile.getName());
            l.fine("Created H264 Track from " + inputFile.getName());
        } else if (inputFile.getName().endsWith(".ac3")) {
            Track track = new AC3TrackImpl(new FileDataSourceImpl(inputFile));
            track2File.put(track, inputFile.getName());
            l.fine("Created AC3 Track from " + inputFile.getName());
        } else if (inputFile.getName().endsWith(".ec3")) {
            Track track = new EC3TrackImpl(new FileDataSourceImpl(inputFile));
            track2File.put(track, inputFile.getName());
            l.fine("Created EC3 Track from " + inputFile.getName());
        } else if (inputFile.getName().endsWith(".dtshd")) {
            Track track = new DTSTrackImpl(new FileDataSourceImpl(inputFile));
            track2File.put(track, inputFile.getName());
            l.fine("Created DTS HD Track from " + inputFile.getName());
        } else {
            l.severe("Cannot identify type of " + inputFile + ". Extensions mp4, mov, m4v, aac, ac3, ec3 or dtshd are known.");
            throw new ExitCodeException("Cannot identify type of " + inputFile + ". Extensions mp4, mov, m4v, aac, ac3, ec3 or dtshd are known.", 1);
        }
    }
    return track2File;
}
/**
 * Wraps every audio/video track in a CENC-encrypting track when both the
 * key and the key id are configured; otherwise the input map is returned
 * untouched. Non-A/V tracks pass through unencrypted.
 */
public Map<Track, String> encryptTracks(Map<Track, String> track2File) {
    if (this.key == null || this.keyid == null) {
        // Encryption not configured — nothing to do.
        return track2File;
    }
    Map<Track, String> encTracks = new HashMap<Track, String>();
    for (Map.Entry<Track, String> trackStringEntry : track2File.entrySet()) {
        Track track = trackStringEntry.getKey();
        String hdlr = track.getHandler();
        if ("vide".equals(hdlr) || "soun".equals(hdlr)) {
            encTracks.put(new CencEncryptingTrackImpl(track, keyid, key), trackStringEntry.getValue());
        } else {
            encTracks.put(track, trackStringEntry.getValue());
        }
    }
    return encTracks;
}
/**
 * Normalizes Apple-authored audio tracks: QuickTime nests the esds box
 * inside a "wave" box ({@code ...a/wave/esds}); this hoists the esds up to
 * the sample entry, drops the "wave" wrapper, and wraps the track so it
 * reports the corrected sample description box. Tracks without the
 * oddity pass through unchanged.
 */
public Map<Track, String> fixAppleOddity(Map<Track, String> track2File) {
    Map<Track, String> nuTracks = new HashMap<Track, String>();
    for (Map.Entry<Track, String> entry : track2File.entrySet()){
        Track track = entry.getKey();
        if (Path.getPath(track.getSampleDescriptionBox(), "...a/wave/esds") != null) {
            final SampleDescriptionBox stsd = track.getSampleDescriptionBox();
            AudioSampleEntry ase = (AudioSampleEntry)stsd.getSampleEntry();
            List<Box> aseBoxes = new ArrayList<Box>();
            // Hoisted esds goes first, then every child except "wave".
            aseBoxes.add(Path.getPath(stsd, "...a/wave/esds"));
            for (Box box : ase.getBoxes()) {
                if (!box.getType().equals("wave")) {
                    aseBoxes.add(box);
                }
            }
            // Rebuild the sample entry's child list in the new order.
            ase.setBoxes(Collections.<Box>emptyList());
            for (Box aseBox : aseBoxes) {
                ase.addBox(aseBox);
            }
            nuTracks.put(new StsdCorrectingTrack(track, stsd), entry.getValue());
        } else {
            nuTracks.put(entry.getKey(), entry.getValue());
        }
    }
    return nuTracks;
}
/**
 * Normalizes all tracks' edit lists so presentation starts at time zero:
 * computes each track's earliest presentation time (from edits plus
 * composition-time offsets), then rewrites every track's edit list relative
 * to the movie-wide minimum — a negative adjusted start becomes a media-time
 * offset edit, a positive one becomes a leading dwell ("empty") edit.
 */
public Map<Track, String> alignEditsToZero(Map<Track, String> tracks) {
    Map<Track, String> result = new HashMap<Track, String>();
    double earliestMoviePresentationTime = 0;
    Map<Track,Double> startTimes = new HashMap<Track, Double>();
    Map<Track,Double> ctsOffset = new HashMap<Track, Double>();
    // Pass 1: per-track earliest presentation time + movie-wide minimum.
    for (Track track : tracks.keySet()) {
        // Only "dwell(s) then one offset edit" lists are supported; the
        // flags enforce that ordering.
        boolean acceptEdit = true;
        boolean acceptDwell = true;
        List<Edit> edits = track.getEdits();
        double earliestTrackPresentationTime = 0;
        for (Edit edit : edits) {
            if (edit.getMediaTime() == -1 && !acceptDwell) {
                throw new RuntimeException("Cannot accept edit list for processing (1)");
            }
            if (edit.getMediaTime() >= 0 && !acceptEdit) {
                throw new RuntimeException("Cannot accept edit list for processing (2)");
            }
            if (edit.getMediaTime() == -1) {
                // Dwell (empty edit) delays the track's start.
                earliestTrackPresentationTime += edit.getSegmentDuration();
            } else /* if edit.getMediaTime() >= 0 */ {
                earliestTrackPresentationTime -= (double) edit.getMediaTime() / edit.getTimeScale();
                acceptEdit = false;
                acceptDwell = false;
            }
        }
        if (track.getCompositionTimeEntries()!=null && track.getCompositionTimeEntries().size()>0) {
            long currentTime = 0;
            // Look at the first 50 samples' PTS to find the smallest
            // composition-time offset.
            // NOTE(review): copyOfRange zero-pads when there are fewer than
            // 50 composition entries, and getSampleDurations()[j] could go
            // out of bounds for very short tracks — confirm inputs always
            // have >= 50 samples.
            int[] ptss = Arrays.copyOfRange(CompositionTimeToSample.blowupCompositionTimes(track.getCompositionTimeEntries()), 0, 50);
            for (int j = 0; j < ptss.length; j++) {
                ptss[j] += currentTime;
                currentTime += track.getSampleDurations()[j];
            }
            Arrays.sort(ptss);
            earliestTrackPresentationTime += (double)ptss[0]/track.getTrackMetaData().getTimescale();
            ctsOffset.put(track, (double)ptss[0]/track.getTrackMetaData().getTimescale());
        } else {
            ctsOffset.put(track, 0.0);
        }
        startTimes.put(track, earliestTrackPresentationTime);
        earliestMoviePresentationTime = Math.min(earliestMoviePresentationTime, earliestTrackPresentationTime);
        System.err.println(track.getName() + "'s starttime after edits: " + earliestTrackPresentationTime);
    }
    // Pass 2: rewrite each track's edit list relative to the movie minimum.
    for (Track track : tracks.keySet()) {
        double adjustedStartTime = startTimes.get(track) - earliestMoviePresentationTime - ctsOffset.get(track);
        final List<Edit> edits = new ArrayList<Edit>();
        if (adjustedStartTime < 0) {
            // Track starts early: skip into the media by the difference.
            edits.add(new Edit((long)(-adjustedStartTime * track.getTrackMetaData().getTimescale()), track.getTrackMetaData().getTimescale(), 1.0, (double) track.getDuration() / track.getTrackMetaData().getTimescale()));
        } else if (adjustedStartTime > 0) {
            // Track starts late: prepend an empty (dwell) edit.
            edits.add(new Edit(-1, track.getTrackMetaData().getTimescale(), 1.0, adjustedStartTime));
            edits.add(new Edit(0, track.getTrackMetaData().getTimescale(), 1.0, (double) track.getDuration() / track.getTrackMetaData().getTimescale()));
        }
        result.put(new WrappingTrack(track) {
            @Override
            public List<Edit> getEdits() {
                return edits;
            }
        }, tracks.get(track));
    }
    return result;
}
/**
 * Groups tracks into families keyed by "codec-language" (for AAC the
 * channel configuration is appended, since channel layout matters for
 * adaptation sets).
 *
 * @return map from family key to its member tracks
 */
public Map<String, List<Track>> findTrackFamilies(Set<Track> allTracks) throws IOException {
    HashMap<String, List<Track>> trackFamilies = new HashMap<String, List<Track>>();
    for (Track track : allTracks) {
        String codec = track.getSampleDescriptionBox().getSampleEntry().getType();
        String family = codec + "-" + track.getTrackMetaData().getLanguage();
        if ("mp4a".equals(codec)) {
            // we need to look at actual channel configuration
            ESDescriptorBox esds = track.getSampleDescriptionBox().getSampleEntry().getBoxes(ESDescriptorBox.class).get(0);
            AudioSpecificConfig audioSpecificConfig = esds.getEsDescriptor().getDecoderConfigDescriptor().getAudioSpecificInfo();
            family += "-" + audioSpecificConfig.getChannelConfiguration();
        }
        List<Track> members = trackFamilies.get(family);
        if (members == null) {
            members = new LinkedList<Track>();
            trackFamilies.put(family, members);
        }
        members.add(track);
    }
    return trackFamilies;
}
/**
 * Writes the DASH manifest ("Manifest.mpd") for the exploded segment-list layout into the
 * given output directory.
 *
 * @param trackFamilies   grouping of tracks into adaptation sets (see findTrackFamilies)
 * @param trackBitrate    bitrate per track
 * @param trackFilename   output file name per track
 * @param dashedFiles     fragmented (dashed) container per track
 * @param trackToSegments segment files per track
 * @param outputDirectory directory the manifest file is written to
 * @param initPattern     URL pattern for initialization segments
 * @param mediaPattern    URL pattern for media segments
 */
public void writeManifestExploded(Map<String, List<Track>> trackFamilies,
                                  Map<Track, Long> trackBitrate,
                                  Map<Track, String> trackFilename,
                                  Map<Track, Container> dashedFiles,
                                  Map<Track, List<File>> trackToSegments,
                                  File outputDirectory, String initPattern, String mediaPattern) throws IOException {
    // every track is associated with the single key id configured on this instance
    Map<Track, UUID> trackKeyIds = new HashMap<Track, UUID>();
    for (List<Track> tracks : trackFamilies.values()) {
        for (Track track : tracks) {
            trackKeyIds.put(track, this.keyid);
        }
    }
    MPDDocument mpdDocument =
            new ExplodedSegmentListManifestWriterImpl(
                    trackFamilies, dashedFiles, trackBitrate, trackFilename,
                    trackKeyIds, trackToSegments, initPattern, mediaPattern).getManifest();
    // XmlBeans save options: suggest the "cenc" prefix for the Common Encryption namespace,
    // use the default namespace for the MPD schema and pretty-print the output
    XmlOptions xmlOptions = new XmlOptions();
    //xmlOptions.setUseDefaultNamespace();
    HashMap<String, String> ns = new HashMap<String, String>();
    //ns.put("urn:mpeg:DASH:schema:MPD:2011", "");
    ns.put("urn:mpeg:cenc:2013", "cenc");
    xmlOptions.setSaveSuggestedPrefixes(ns);
    xmlOptions.setSaveAggressiveNamespaces();
    xmlOptions.setUseDefaultNamespace();
    xmlOptions.setSavePrettyPrint();
    File manifest1 = new File(outputDirectory, "Manifest.mpd");
    l.info("Writing " + manifest1 + "... ");
    mpdDocument.save(manifest1, xmlOptions);
    l.info("Done.");
}
/**
 * Writes the DASH manifest ("Manifest.mpd") for the single-sidx (on-demand, segment-base)
 * layout into this instance's output directory.
 * NOTE(review): the XmlOptions setup is duplicated in writeManifestExploded — consider
 * extracting a shared helper.
 *
 * @param trackFamilies grouping of tracks into adaptation sets (see findTrackFamilies)
 * @param trackBitrate  bitrate per track
 * @param trackFilename output file name per track
 * @param dashedFiles   fragmented (dashed) container per track
 */
public void writeManifestSingleSidx(Map<String, List<Track>> trackFamilies, Map<Track, Long> trackBitrate, Map<Track, String> trackFilename, Map<Track, Container> dashedFiles) throws IOException {
    // every track is associated with the single key id configured on this instance
    Map<Track, UUID> trackKeyIds = new HashMap<Track, UUID>();
    for (List<Track> tracks : trackFamilies.values()) {
        for (Track track : tracks) {
            trackKeyIds.put(track, this.keyid);
        }
    }
    SegmentBaseSingleSidxManifestWriterImpl dashManifestWriter = new SegmentBaseSingleSidxManifestWriterImpl(
            trackFamilies, dashedFiles,
            trackBitrate, trackFilename,
            trackKeyIds);
    MPDDocument mpdDocument = dashManifestWriter.getManifest();
    // XmlBeans save options: suggest the "cenc" prefix for the Common Encryption namespace,
    // use the default namespace for the MPD schema and pretty-print the output
    XmlOptions xmlOptions = new XmlOptions();
    //xmlOptions.setUseDefaultNamespace();
    HashMap<String, String> ns = new HashMap<String, String>();
    //ns.put("urn:mpeg:DASH:schema:MPD:2011", "");
    ns.put("urn:mpeg:cenc:2013", "cenc");
    xmlOptions.setSaveSuggestedPrefixes(ns);
    xmlOptions.setSaveAggressiveNamespaces();
    xmlOptions.setUseDefaultNamespace();
    xmlOptions.setSavePrettyPrint();
    mpdDocument.save(new File(this.outputDirectory, "Manifest.mpd"), xmlOptions);
}
private class StsdCorrectingTrack extends AbstractTrack {
Track track;
SampleDescriptionBox stsd;
public StsdCorrectingTrack(Track track, SampleDescriptionBox stsd) {
super(track.getName());
this.track = track;
this.stsd = stsd;
}
public void close() throws IOException {
track.close();
}
public SampleDescriptionBox getSampleDescriptionBox() {
return stsd;
}
public long[] getSampleDurations() {
return track.getSampleDurations();
}
public TrackMetaData getTrackMetaData() {
return track.getTrackMetaData();
}
public String getHandler() {
return track.getHandler();
}
public List<Sample> getSamples() {
return track.getSamples();
}
}
} |
package de.setsoftware.reviewtool.model.changestructure;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.eclipse.core.resources.IFile;
import org.eclipse.core.resources.IMarker;
import org.eclipse.core.resources.IResource;
import org.eclipse.core.resources.ResourcesPlugin;
import org.eclipse.core.resources.WorkspaceJob;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IPath;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.OperationCanceledException;
import org.eclipse.core.runtime.Status;
import org.eclipse.swt.widgets.Display;
import de.setsoftware.reviewtool.base.Logger;
import de.setsoftware.reviewtool.base.Pair;
import de.setsoftware.reviewtool.base.ReviewtoolException;
import de.setsoftware.reviewtool.base.WeakListeners;
import de.setsoftware.reviewtool.model.Constants;
import de.setsoftware.reviewtool.model.api.IBinaryChange;
import de.setsoftware.reviewtool.model.api.IChange;
import de.setsoftware.reviewtool.model.api.IChangeData;
import de.setsoftware.reviewtool.model.api.IChangeSource;
import de.setsoftware.reviewtool.model.api.IChangeSourceUi;
import de.setsoftware.reviewtool.model.api.IChangeVisitor;
import de.setsoftware.reviewtool.model.api.ICommit;
import de.setsoftware.reviewtool.model.api.IFileHistoryNode;
import de.setsoftware.reviewtool.model.api.IFragment;
import de.setsoftware.reviewtool.model.api.IFragmentTracer;
import de.setsoftware.reviewtool.model.api.IRevisionedFile;
import de.setsoftware.reviewtool.model.api.ITextualChange;
import de.setsoftware.reviewtool.ordering.efficientalgorithm.TourCalculatorControl;
import de.setsoftware.reviewtool.telemetry.Telemetry;
/**
* Manages the current state regarding the changes/tours under review.
*/
public class ToursInReview {
/**
 * Interface for observers of instances of {@link ToursInReview}.
 * Listeners are registered via registerListener and are held through {@link WeakListeners},
 * so registrants must keep a strong reference themselves.
 */
public static interface IToursInReviewChangeListener {
    /**
     * Is called when the available tours change (e.g. due to a merge or split).
     */
    public abstract void toursChanged();
    /**
     * Is called when the active tour changes. Will not be called when the active tour
     * changes together with the tours as a whole. Both arguments can be null, meaning that
     * there is no respective tour.
     */
    public abstract void activeTourChanged(Tour oldActive, Tour newActive);
}
/**
 * Interface for user interaction during the creation of {@link ToursInReview}.
 */
public static interface ICreateToursUi {
    /**
     * Lets the user choose one of the given tour structures.
     * The given list contains pairs with a description of the merge algorithm and the resulting tours.
     * There always is at least one choice.
     * When the user cancels, null is returned.
     */
    public abstract List<? extends Tour> selectInitialTours(
            List<? extends Pair<String, List<? extends Tour>>> choices);
    /**
     * Lets the user choose which subset of commits to review, which filters
     * to apply and which of the commits to merge into a tour.
     * When the user cancels, null is returned.
     *
     * @param changes All commits belonging to the review.
     * @param strategyResults Pairs with a description of the filter strategy and the resulting filter candidates.
     * @param reviewRounds The review rounds conducted so far (to show them to the user).
     * @return The chosen commit subset and irrelevance filters, or null on cancel.
     */
    public abstract UserSelectedReductions selectIrrelevant(
            List<? extends ICommit> changes,
            List<Pair<String, Set<? extends IChange>>> strategyResults,
            List<ReviewRoundInfo> reviewRounds);
}
/**
 * Transfer object for the results of the user interaction to select
 * subset of commits, filters, ...
 */
public static final class UserSelectedReductions {
    //the commits the user chose to include in the review
    private final List<? extends ICommit> commitSubset;
    //per accepted filter strategy: its description and the changes to mark as irrelevant
    private final List<? extends Pair<String, Set<? extends IChange>>> toMakeIrrelevant;
    public UserSelectedReductions(
            List<? extends ICommit> chosenCommitSubset,
            List<Pair<String, Set<? extends IChange>>> chosenFilterSubset) {
        this.commitSubset = chosenCommitSubset;
        this.toMakeIrrelevant = chosenFilterSubset;
    }
}
/**
 * Infos on a review round: its sequential number, the time it took place and the reviewing user.
 * Instances are immutable. Ordering, equality and hash code are based on the round number only.
 */
public static final class ReviewRoundInfo implements Comparable<ReviewRoundInfo> {
    private final int number;
    private final Date date;
    private final String user;

    public ReviewRoundInfo(int number, Date date, String user) {
        this.number = number;
        //defensive copy: java.util.Date is mutable
        this.date = date == null ? null : new Date(date.getTime());
        this.user = user;
    }

    @Override
    public int compareTo(ReviewRoundInfo o) {
        return Integer.compare(this.number, o.number);
    }

    @Override
    public boolean equals(Object o) {
        if (!(o instanceof ReviewRoundInfo)) {
            return false;
        }
        //consistent with compareTo: only the round number counts
        return this.compareTo((ReviewRoundInfo) o) == 0;
    }

    @Override
    public int hashCode() {
        return this.number;
    }

    /** Returns the time of the review round (as a defensive copy of the internal date). */
    public Date getTime() {
        return this.date == null ? null : new Date(this.date.getTime());
    }

    /** Returns the name of the reviewing user. */
    public String getReviewer() {
        return this.user;
    }

    /** Returns the sequential number of this review round. */
    public int getNumber() {
        return this.number;
    }
}
//combined file history: index 0 holds the remote graph; index 1 (when present) the local changes graph
private final VirtualFileHistoryGraph historyGraph;
//the tours shown to the user, in display order
private final List<Tour> topmostTours;
//the repository change data this review is based on; null when created via create(List) (tests)
private final IChangeData remoteChanges;
//local files known to be modified, as of the last local-change analysis
private Map<File, IRevisionedFile> modifiedFiles;
//index into topmostTours of the currently active tour
private int currentTourIndex;
//observers; held weakly, so registrants must keep their own reference
private final WeakListeners<IToursInReviewChangeListener> listeners = new WeakListeners<>();
/** Creates an instance based on remote change data; the local part of the history graph is added later. */
private ToursInReview(final List<? extends Tour> topmostTours, final IChangeData remoteChanges) {
    this.historyGraph = new VirtualFileHistoryGraph(remoteChanges.getHistoryGraph());
    this.topmostTours = new ArrayList<>(topmostTours);
    this.remoteChanges = remoteChanges;
    this.modifiedFiles = remoteChanges.getLocalPathMap();
    this.currentTourIndex = 0;
}
/** Creates an instance without remote change data (used by create(List), mainly for tests). */
private ToursInReview(final List<? extends Tour> topmostTours) {
    this.historyGraph = new VirtualFileHistoryGraph();
    this.topmostTours = new ArrayList<>(topmostTours);
    this.remoteChanges = null;
    this.modifiedFiles = new LinkedHashMap<>();
    this.currentTourIndex = 0;
}
/**
 * Creates a new object with the given tours (mainly for tests).
 * The created instance carries no remote change data (remoteChanges stays null).
 */
public static ToursInReview create(List<Tour> tours) {
    return new ToursInReview(tours);
}
/**
 * Loads the tours for the given ticket and creates a corresponding {@link ToursInReview}
 * object with initial settings. When there is user interaction and the user cancels,
 * null is returned.
 */
public static ToursInReview create(
        IChangeSource src,
        final IChangeSourceUi changeSourceUi,
        List<? extends IIrrelevanceDetermination> irrelevanceDeterminationStrategies,
        List<? extends ITourRestructuring> tourRestructuringStrategies,
        IStopOrdering orderingAlgorithm,
        ICreateToursUi createUi,
        String ticketKey,
        List<ReviewRoundInfo> reviewRounds) {
    changeSourceUi.subTask("Determining relevant changes...");
    final IChangeData changes = src.getRepositoryChanges(ticketKey, changeSourceUi);
    changeSourceUi.subTask("Filtering changes...");
    //let the user deselect commits and mark changes as irrelevant; null means the user cancelled
    final List<? extends ICommit> filteredChanges =
            filterChanges(irrelevanceDeterminationStrategies, changes.getMatchedCommits(),
                    createUi, changeSourceUi, reviewRounds);
    if (filteredChanges == null) {
        return null;
    }
    changeSourceUi.subTask("Creating tours from changes...");
    final List<Tour> tours = toTours(
            filteredChanges,
            new FragmentTracer(changes.getHistoryGraph()),
            changeSourceUi);
    //let the user pick one of the possible tour restructurings; null means the user cancelled
    final List<? extends Tour> userSelection =
            determinePossibleRestructurings(tourRestructuringStrategies, tours, createUi, changeSourceUi);
    if (userSelection == null) {
        return null;
    }
    changeSourceUi.subTask("Ordering stops...");
    final List<? extends Tour> toursToShow = groupAndSort(
            userSelection,
            orderingAlgorithm,
            new TourCalculatorControl() {
                //switch the ordering algorithm to its fast mode when it runs longer than this (ms)
                private static final long FAST_MODE_THRESHOLD = 20000;
                private final long startTime = System.currentTimeMillis();
                @Override
                public synchronized boolean isCanceled() {
                    return changeSourceUi.isCanceled();
                }
                @Override
                public boolean isFastModeNeeded() {
                    return System.currentTimeMillis() - this.startTime > FAST_MODE_THRESHOLD;
                }
            });
    final ToursInReview result = new ToursInReview(toursToShow, changes);
    //also pick up uncommitted local modifications; no marker factory yet, markers are created later
    result.createLocalTour(null, changeSourceUi, null);
    return result;
}
/**
 * Groups and sorts the stops of each selected tour using the given ordering algorithm.
 * An {@link InterruptedException} from the algorithm is translated into Eclipse's
 * {@link OperationCanceledException}.
 */
private static List<? extends Tour> groupAndSort(
        List<? extends Tour> userSelection, IStopOrdering orderingAlgorithm, TourCalculatorControl isCanceled) {
    try {
        final List<Tour> ret = new ArrayList<>();
        for (final Tour t : userSelection) {
            ret.add(new Tour(t.getDescription(), orderingAlgorithm.groupAndSort(t.getStops(), isCanceled)));
        }
        return ret;
    } catch (final InterruptedException e) {
        //restore the interrupt flag so code further up the stack can still observe the interruption
        Thread.currentThread().interrupt();
        throw new OperationCanceledException();
    }
}
/**
 * (Re)creates the local tour by (re)collecting local changes and combining them with the repository changes
 * in a {@link VirtualFileHistoryGraph}.
 *
 * @param paths Additional local paths to analyze; when null, null is passed through to the
 *        change source (presumably meaning "analyze everything relevant" — TODO confirm).
 * @param progressMonitor The progress monitor to use.
 * @param markerFactory The marker factory to use. May be null if initially called while creating the tours.
 */
public void createLocalTour(
        final List<File> paths,
        final IProgressMonitor progressMonitor,
        final IStopMarkerFactory markerFactory) {
    progressMonitor.subTask("Collecting local changes...");
    final IChangeData localChanges;
    try {
        if (paths == null) {
            localChanges = this.remoteChanges.getSource().getLocalChanges(this.remoteChanges, null,
                    progressMonitor);
        } else {
            //analyze both the previously known modified files and the newly passed paths
            final List<File> allFilesToAnalyze = new ArrayList<>(this.modifiedFiles.keySet());
            allFilesToAnalyze.addAll(paths);
            localChanges = this.remoteChanges.getSource().getLocalChanges(this.remoteChanges, allFilesToAnalyze,
                    progressMonitor);
        }
    } catch (final ReviewtoolException e) {
        //if there is a problem while determining the local changes, ignore them
        Logger.warn("problem while determining local changes", e);
        return;
    }
    this.modifiedFiles = new LinkedHashMap<>(localChanges.getLocalPathMap());
    //index 1 of the virtual graph holds the local changes; replace the previous local graph if present
    if (this.historyGraph.size() > 1) {
        this.historyGraph.remove(1);
    }
    this.historyGraph.add(localChanges.getHistoryGraph());
    this.updateMostRecentFragmentsWithLocalChanges();
    this.notifyListenersAboutTourStructureChange(markerFactory);
}
/**
 * Re-traces every stop against the combined (remote + local) history graph, so that the
 * stops' most recent fragment data reflects the current local file contents.
 */
private void updateMostRecentFragmentsWithLocalChanges() {
    final IFragmentTracer combinedTracer = new FragmentTracer(this.historyGraph);
    for (final Tour currentTour : this.topmostTours) {
        for (final Stop currentStop : currentTour.getStops()) {
            currentStop.updateMostRecentData(combinedTracer);
        }
    }
}
/**
 * Applies all irrelevance determination strategies, lets the user pick the commit subset and
 * the filters to apply, and returns the chosen commits with the selected changes marked as
 * irrelevant. Returns null when the user cancels. Failing strategies are logged and skipped.
 */
private static List<? extends ICommit> filterChanges(
        final List<? extends IIrrelevanceDetermination> irrelevanceDeterminationStrategies,
        final List<? extends ICommit> changes,
        final ICreateToursUi createUi,
        final IProgressMonitor progressMonitor,
        final List<ReviewRoundInfo> reviewRounds) {
    Telemetry.event("originalChanges")
        .param("count", countChanges(changes, false))
        .param("relevant", countChanges(changes, true))
        .log();
    final List<Pair<String, Set<? extends IChange>>> strategyResults = new ArrayList<>();
    for (final IIrrelevanceDetermination strategy : irrelevanceDeterminationStrategies) {
        try {
            if (progressMonitor.isCanceled()) {
                throw new OperationCanceledException();
            }
            final Set<? extends IChange> irrelevantChanges = determineIrrelevantChanges(changes, strategy);
            Telemetry.event("relevanceFilterResult")
                .param("description", strategy.getDescription())
                .param("size", irrelevantChanges.size())
                .log();
            if (areAllIrrelevant(irrelevantChanges)) {
                //skip strategies that won't result in further changes to irrelevant
                continue;
            }
            strategyResults.add(Pair.<String, Set<? extends IChange>>create(
                    strategy.getDescription(),
                    irrelevantChanges));
        } catch (final Exception e) {
            //skip instable strategies
            Logger.error("exception in filtering", e);
        }
    }
    //user interaction: choose commits and filters; null means cancel
    final UserSelectedReductions selected =
            createUi.selectIrrelevant(changes, strategyResults, reviewRounds);
    if (selected == null) {
        return null;
    }
    //union of all changes from the accepted filter strategies
    final Set<IChange> toMakeIrrelevant = new HashSet<>();
    final Set<String> selectedDescriptions = new LinkedHashSet<>();
    for (final Pair<String, Set<? extends IChange>> set : selected.toMakeIrrelevant) {
        toMakeIrrelevant.addAll(set.getSecond());
        selectedDescriptions.add(set.getFirst());
    }
    Telemetry.event("selectedRelevanceFilter")
        .param("descriptions", selectedDescriptions)
        .log();
    final Set<String> selectedCommits = new LinkedHashSet<>();
    for (final ICommit commit : selected.commitSubset) {
        selectedCommits.add(commit.getRevision().toString());
    }
    Telemetry.event("selectedCommitSubset")
        .param("commits", selectedCommits)
        .log();
    final List<ICommit> ret = new ArrayList<>();
    for (final ICommit c : selected.commitSubset) {
        ret.add(c.makeChangesIrrelevant(toMakeIrrelevant));
    }
    return ret;
}
/**
 * Counts the changes in the given commits. When onlyRelevant is set, changes that are
 * irrelevant for review are not counted.
 */
private static int countChanges(List<? extends ICommit> changes, boolean onlyRelevant) {
    int count = 0;
    for (final ICommit commit : changes) {
        for (final IChange change : commit.getChanges()) {
            final boolean skip = onlyRelevant && change.isIrrelevantForReview();
            if (!skip) {
                count++;
            }
        }
    }
    return count;
}
/** Collects every change from the given commits that the strategy classifies as irrelevant. */
private static Set<? extends IChange> determineIrrelevantChanges(
        List<? extends ICommit> changes,
        IIrrelevanceDetermination strategy) {
    final Set<IChange> irrelevant = new HashSet<>();
    for (final ICommit commit : changes) {
        for (final IChange candidate : commit.getChanges()) {
            if (strategy.isIrrelevant(candidate)) {
                irrelevant.add(candidate);
            }
        }
    }
    return irrelevant;
}
/** Returns true iff every change in the given set is already marked irrelevant for review (vacuously true for an empty set). */
private static boolean areAllIrrelevant(Set<? extends IChange> changes) {
    for (final IChange change : changes) {
        if (change.isIrrelevantForReview()) {
            continue;
        }
        return false;
    }
    return true;
}
/**
 * Applies every tour restructuring strategy to the original (one tour per commit) structure
 * and lets the user pick one of the resulting alternatives.
 * Returns null when the user cancels. Failing strategies are logged and skipped.
 */
private static List<? extends Tour> determinePossibleRestructurings(
        final List<? extends ITourRestructuring> tourRestructuringStrategies,
        final List<Tour> originalTours,
        final ICreateToursUi createUi,
        final IProgressMonitor progressMonitor) {
    final List<Pair<String, List<? extends Tour>>> possibleRestructurings = new ArrayList<>();
    //the unchanged structure is always offered as the first choice
    possibleRestructurings.add(Pair.<String, List<? extends Tour>>create("one tour per commit", originalTours));
    Telemetry.event("originalTourStructure")
        .params(Tour.determineSize(originalTours))
        .log();
    for (final ITourRestructuring restructuringStrategy : tourRestructuringStrategies) {
        if (progressMonitor.isCanceled()) {
            throw new OperationCanceledException();
        }
        try {
            //strategies get a defensive copy of the original tour list
            final List<? extends Tour> restructuredTour =
                    restructuringStrategy.restructure(new ArrayList<>(originalTours));
            //NOTE(review): telemetry is logged before the null check below, so determineSize may
            //be handed null when the strategy declined; any resulting exception is swallowed by
            //the catch below and the strategy is treated as "instable" — confirm this is intended
            Telemetry.event("possibleTourStructure")
                .param("strategy", restructuringStrategy.getClass())
                .params(Tour.determineSize(restructuredTour))
                .log();
            if (restructuredTour != null) {
                possibleRestructurings.add(Pair.<String, List<? extends Tour>>create(
                        restructuringStrategy.getDescription(), restructuredTour));
            }
        } catch (final Exception e) {
            //skip instable restructurings
            Logger.error("exception in restructuring", e);
        }
    }
    return createUi.selectInitialTours(possibleRestructurings);
}
/** Creates one tour per commit, with one stop per traced change fragment; cancellable via the monitor. */
private static List<Tour> toTours(final List<? extends ICommit> changes, final IFragmentTracer tracer,
        final IProgressMonitor progressMonitor) {
    final List<Tour> tours = new ArrayList<>();
    for (final ICommit commit : changes) {
        if (progressMonitor.isCanceled()) {
            throw new OperationCanceledException();
        }
        assert commit.isVisible();
        final Tour commitTour = new Tour(
                commit.getMessage(),
                toSliceFragments(commit.getChanges(), tracer));
        tours.add(commitTour);
    }
    return tours;
}
/** Converts all given changes into stops by delegating to toSliceFragment for each one. */
private static List<Stop> toSliceFragments(List<? extends IChange> changes, IFragmentTracer tracer) {
    final List<Stop> stops = new ArrayList<>();
    for (final IChange change : changes) {
        stops.addAll(toSliceFragment(change, tracer));
    }
    return stops;
}
/**
 * Converts a single change into stops, tracing its fragment/file to the most recent version(s).
 * A change can map to several stops when the tracer yields multiple current fragments/files.
 */
private static List<Stop> toSliceFragment(IChange c, final IFragmentTracer tracer) {
    final List<Stop> ret = new ArrayList<>();
    c.accept(new IChangeVisitor() {
        @Override
        public void handle(ITextualChange visitee) {
            //textual change: trace the "to" fragment to its most recent position(s)
            final List<? extends IFragment> mostRecentFragments = tracer.traceFragment(visitee.getToFragment());
            for (final IFragment fragment : mostRecentFragments) {
                ret.add(new Stop(visitee, fragment));
            }
        }
        @Override
        public void handle(IBinaryChange visitee) {
            //binary change: trace on the file level instead of the fragment level
            for (final IRevisionedFile fileInRevision : tracer.traceFile(visitee.getFrom())) {
                ret.add(new Stop(visitee, fileInRevision));
            }
        }
    });
    return ret;
}
/**
 * Creates markers for the tour stops.
 * Stops belonging to the currently active tour are marked as active; position lookup
 * tables are cached per resource for the duration of this call.
 */
public void createMarkers(final IStopMarkerFactory markerFactory, final IProgressMonitor progressMonitor) {
    final Map<IResource, PositionLookupTable> lookupTables = new HashMap<>();
    for (int i = 0; i < this.topmostTours.size(); i++) {
        final Tour s = this.topmostTours.get(i);
        for (final Stop f : s.getStops()) {
            if (progressMonitor.isCanceled()) {
                throw new OperationCanceledException();
            }
            this.createMarkerFor(markerFactory, lookupTables, f, i == this.currentTourIndex);
        }
    }
}
/**
 * Creates a stop marker for the given stop, using/filling the given per-resource cache of
 * position lookup tables. Returns null when the stop's file has no corresponding workspace
 * resource. Checked exceptions are wrapped into {@link ReviewtoolException}.
 */
private IMarker createMarkerFor(
        IStopMarkerFactory markerFactory,
        final Map<IResource, PositionLookupTable> lookupTables,
        final Stop f,
        final boolean tourActive) {
    try {
        final IResource resource = f.getMostRecentFile().determineResource();
        if (resource == null) {
            return null;
        }
        if (f.isDetailedFragmentKnown()) {
            //single lookup instead of containsKey + two gets; computeIfAbsent is not usable
            //here because PositionLookupTable.create throws checked exceptions
            PositionLookupTable lookupTable = lookupTables.get(resource);
            if (lookupTable == null) {
                lookupTable = PositionLookupTable.create((IFile) resource);
                lookupTables.put(resource, lookupTable);
            }
            final IFragment pos = f.getMostRecentFragment();
            final IMarker marker = markerFactory.createStopMarker(resource, tourActive);
            marker.setAttribute(IMarker.LINE_NUMBER, pos.getFrom().getLine());
            marker.setAttribute(IMarker.CHAR_START, lookupTable.getCharsSinceFileStart(pos.getFrom()));
            marker.setAttribute(IMarker.CHAR_END, lookupTable.getCharsSinceFileStart(pos.getTo()));
            return marker;
        } else {
            //no detailed fragment: create a file-level marker without position attributes
            return markerFactory.createStopMarker(resource, tourActive);
        }
    } catch (final CoreException | IOException e) {
        throw new ReviewtoolException(e);
    }
}
/**
 * Creates a marker for the given fragment.
 * If multiple markers have to be created, use the method that caches lookup tables instead.
 * If a marker could not be created (for example because the resource is not available in Eclipse), null
 * is returned.
 *
 * @return The created marker (always marked as belonging to the active tour), or null.
 */
public IMarker createMarkerFor(
        IStopMarkerFactory markerFactory,
        final Stop f) {
    return this.createMarkerFor(markerFactory, new HashMap<IResource, PositionLookupTable>(), f, true);
}
/**
 * Returns a {@link IFileHistoryNode} for passed file, looked up in the combined
 * (remote + local) history graph.
 * @param file The file whose change history to retrieve.
 * @return The {@link IFileHistoryNode} describing changes for passed {@link FileInRevision} or null if not found.
 */
public IFileHistoryNode getFileHistoryNode(final IRevisionedFile file) {
    return this.historyGraph.getNodeFor(file);
}
/**
 * Returns the topmost tours in display order.
 * NOTE(review): this hands out the internal, mutable list; callers could change the tour
 * structure without listeners being notified — consider returning an unmodifiable view.
 */
public List<Tour> getTopmostTours() {
    return this.topmostTours;
}
/**
 * Sets the given tour as the active tour, if it is not already active.
 * Recreates markers accordingly and notifies registered listeners.
 */
public void ensureTourActive(Tour t, IStopMarkerFactory markerFactory) throws CoreException {
    this.ensureTourActive(t, markerFactory, true);
}
/**
 * Sets the given tour as the active tour, if it is not already active.
 * Recreates markers accordingly (asynchronously, in a workspace job).
 *
 * @param notify When true, registered listeners are informed about the active tour change.
 */
public void ensureTourActive(Tour t, final IStopMarkerFactory markerFactory, boolean notify)
        throws CoreException {
    final int index = this.topmostTours.indexOf(t);
    if (index != this.currentTourIndex) {
        final Tour oldActive = this.getActiveTour();
        this.currentTourIndex = index;
        //recreate the markers in a background workspace job so the UI stays responsive
        new WorkspaceJob("Review marker update") {
            @Override
            public IStatus runInWorkspace(IProgressMonitor progressMonitor) throws CoreException {
                ToursInReview.this.clearMarkers();
                ToursInReview.this.createMarkers(markerFactory, progressMonitor);
                return Status.OK_STATUS;
            }
        }.schedule();
        if (notify) {
            for (final IToursInReviewChangeListener l : this.listeners) {
                l.activeTourChanged(oldActive, this.getActiveTour());
            }
        }
        Telemetry.event("tourActivated")
            .param("tourIndex", index)
            .log();
    }
}
/**
 * Clears all current tour stop markers (both the active and the inactive marker type)
 * from the whole workspace.
 */
public void clearMarkers() throws CoreException {
    final String[] markerIds = {Constants.STOPMARKER_ID, Constants.INACTIVESTOPMARKER_ID};
    for (final String markerId : markerIds) {
        ResourcesPlugin.getWorkspace().getRoot().deleteMarkers(
                markerId, true, IResource.DEPTH_INFINITE);
    }
}
/**
 * Returns the currently active tour or null if there is none (which should only
 * occur when there are no tours).
 */
public Tour getActiveTour() {
    final boolean indexValid =
            this.currentTourIndex >= 0 && this.currentTourIndex < this.topmostTours.size();
    return indexValid ? this.topmostTours.get(this.currentTourIndex) : null;
}
/**
 * Recreates the stop markers (asynchronously, in a workspace job) and informs all registered
 * listeners that the tour structure changed (asynchronously, on the SWT UI thread).
 */
private void notifyListenersAboutTourStructureChange(final IStopMarkerFactory markerFactory) {
    // markerFactors is null only if called from ToursInReview.create(), and in this case ensureTourActive()
    // is called later on which recreates the markers
    if (markerFactory != null) {
        new WorkspaceJob("Stop marker update") {
            @Override
            public IStatus runInWorkspace(IProgressMonitor progressMonitor) throws CoreException {
                ToursInReview.this.clearMarkers();
                ToursInReview.this.createMarkers(markerFactory, progressMonitor);
                return Status.OK_STATUS;
            }
        }.schedule();
    }
    //notify listeners on the UI thread
    Display.getDefault().asyncExec(new Runnable() {
        @Override
        public void run() {
            for (final IToursInReviewChangeListener l : ToursInReview.this.listeners) {
                l.toursChanged();
            }
        }
    });
}
/** Registers the given listener; it is held weakly, so the caller must keep a strong reference. */
public void registerListener(IToursInReviewChangeListener listener) {
    this.listeners.add(listener);
}
/**
 * Returns all stops (from all tours) that refer to the given file.
 */
public List<Stop> getStopsFor(File absolutePath) {
    final List<Stop> matching = new ArrayList<>();
    for (final Tour tour : this.topmostTours) {
        for (final Stop stop : tour.getStops()) {
            if (absolutePath.equals(stop.getAbsoluteFile())) {
                matching.add(stop);
            }
        }
    }
    return matching;
}
/**
 * Returns the index of the (first) tour that contains the given stop (identity comparison).
 * If no tour contains it, 0 is returned.
 * NOTE(review): the previous javadoc promised -1 for the not-found case, but the code returns
 * 0 (i.e. the first tour) — confirm which sentinel callers actually rely on before changing.
 */
public int findTourIndexWithStop(Stop currentStop) {
    for (int i = 0; i < this.topmostTours.size(); i++) {
        for (final Stop s : this.topmostTours.get(i).getStops()) {
            if (s == currentStop) {
                return i;
            }
        }
    }
    return 0;
}
/**
 * Determines a stop that is as close as possible to the given line in the given resource.
 * The closeness measure is tweaked to (hopefully) capture the users intention as good as possible
 * for cases where he did not click directly on a stop.
 */
public Pair<Tour, Stop> findNearestStop(IPath absoluteResourcePath, int line) {
    if (this.topmostTours.isEmpty()) {
        return null;
    }
    int smallestDistance = Integer.MAX_VALUE;
    Tour nearestTour = null;
    Stop nearestStop = null;
    for (final Tour tour : this.topmostTours) {
        for (final Stop candidate : tour.getStops()) {
            final int distance = this.calculateDistance(candidate, absoluteResourcePath, line);
            if (distance < smallestDistance) {
                smallestDistance = distance;
                nearestTour = tour;
                nearestStop = candidate;
            }
        }
    }
    return Pair.create(nearestTour, nearestStop);
}
/**
 * Returns a distance measure between the given stop and the given line in the given resource.
 * Integer.MAX_VALUE when the stop is in a different file, Integer.MAX_VALUE - 1 when the stop
 * has no detailed fragment, 0 when the line lies inside the fragment; otherwise the line
 * distance, where a click above the stop counts four times as much as a click below it.
 */
private int calculateDistance(Stop stop, IPath resource, int line) {
    if (!stop.getMostRecentFile().toLocalPath().equals(resource)) {
        return Integer.MAX_VALUE;
    }
    final IFragment fragment = stop.getMostRecentFragment();
    if (fragment == null) {
        return Integer.MAX_VALUE - 1;
    }
    if (line < fragment.getFrom().getLine()) {
        //there is a bias that lets lines between stops belong more closely to the stop above than below
        // to a certain degree
        return (fragment.getFrom().getLine() - line) * 4;
    } else if (line > fragment.getTo().getLine()) {
        return line - fragment.getTo().getLine();
    } else {
        return 0;
    }
}
/**
 * Determines the direct parent tour of the given element.
 * Returns null when none is found.
 */
public Tour getParentFor(TourElement element) {
    Tour found = null;
    for (final Tour candidate : this.topmostTours) {
        final Tour parent = candidate.findParentFor(element);
        if (parent != null) {
            found = parent;
            break;
        }
    }
    return found;
}
/**
 * Determines the topmost parent tour of the given element.
 * Returns null when none is found.
 */
public Tour getTopmostTourWith(TourElement element) {
    for (final Tour candidate : this.topmostTours) {
        if (candidate.findParentFor(element) != null) {
            return candidate;
        }
    }
    return null;
}
} |
package jetbrains.buildServer.deployer.agent.ssh;
import com.intellij.openapi.util.text.StringUtil;
import com.jcraft.jsch.ChannelExec;
import com.jcraft.jsch.Session;
import java.io.IOException;
import jetbrains.buildServer.RunBuildException;
import jetbrains.buildServer.agent.BuildProgressLogger;
import jetbrains.buildServer.deployer.agent.SyncBuildProcessAdapter;
import org.jetbrains.annotations.NotNull;
import java.io.InputStream;
class SSHExecProcessAdapter extends SyncBuildProcessAdapter {
private final String myCommands;
private final SSHSessionProvider myProvider;
private final String myPty;
public SSHExecProcessAdapter(@NotNull final SSHSessionProvider provider,
@NotNull final String commands,
final String pty,
@NotNull final BuildProgressLogger buildLogger) {
super(buildLogger);
myProvider = provider;
myCommands = commands;
myPty = pty;
}
@Override
public void runProcess() throws RunBuildException {
final Session session = myProvider.getSession();
try {
executeCommand(session, myPty, myCommands);
} catch (RunBuildException e) {
throw e;
} catch (Exception e) {
throw new RunBuildException(e);
} finally {
if (session != null) {
session.disconnect();
}
}
}
private void executeCommand(Session session, String pty, String command) throws Exception {
ChannelExec channel = null;
myLogger.message("Executing commands:\n" + command + "\non host [" + session.getHost() + "]");
try {
channel = (ChannelExec)session.openChannel("exec");
if (!StringUtil.isEmpty(pty)) {
channel.setPty(true);
channel.setPtyType(pty);
}
channel.setCommand(command);
final InputStream inputStream = channel.getInputStream();
final InputStream errorStream = channel.getErrStream();
final StringBuilder result = new StringBuilder();
byte[] buf = new byte[8192];
channel.connect();
while (!isInterrupted()) {
boolean readFromInput = true;
boolean readFromError = true;
if (inputStream.available() > 0) {
readFromInput = readStream(inputStream, result, buf, 8192);
}
if (errorStream.available() > 0) {
readFromError = readStream(errorStream, result, buf, 8192);
}
boolean nothingWasRead = !readFromInput && !readFromError;
if (nothingWasRead || channel.isClosed()) {
break;
}
}
myLogger.message("Exec output:\n" + result.toString());
if (isInterrupted()) {
myLogger.message("Interrupted.");
}
} finally {
if (channel != null) {
channel.disconnect();
int exitCode = channel.getExitStatus();
myLogger.message("ssh exit-code: " + exitCode);
if (exitCode > 0) {
throw new RunBuildException("Non-zero exit code from ssh exec: [" + exitCode + "]");
}
}
}
}
private boolean readStream(InputStream inputStream, StringBuilder appendTo, byte[] buffer, final int BUFFER_LENGTH) throws IOException {
int i = inputStream.read(buffer, 0, BUFFER_LENGTH);
if (i < 0) {
return false;
}
appendTo.append(new String(buffer, 0, i));
return true;
}
} |
package org.hisp.dhis.dbms;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.hibernate.SessionFactory;
import org.hisp.dhis.cache.HibernateCacheManager;
import org.springframework.jdbc.BadSqlGrammarException;
import org.springframework.jdbc.core.JdbcTemplate;
import java.util.List;
/**
* @author Lars Helge Overland
*/
public class HibernateDbmsManager
implements DbmsManager
{
private static final Log log = LogFactory.getLog( HibernateDbmsManager.class );
// Dependencies
private JdbcTemplate jdbcTemplate;
/** Sets the JDBC template used for the raw SQL statements (setter-based dependency injection). */
public void setJdbcTemplate( JdbcTemplate jdbcTemplate )
{
    this.jdbcTemplate = jdbcTemplate;
}
private SessionFactory sessionFactory;
/** Sets the Hibernate session factory (setter-based dependency injection). */
public void setSessionFactory( SessionFactory sessionFactory )
{
    this.sessionFactory = sessionFactory;
}
private HibernateCacheManager cacheManager;
/** Sets the Hibernate cache manager (setter-based dependency injection). */
public void setCacheManager( HibernateCacheManager cacheManager )
{
    this.cacheManager = cacheManager;
}
// DbmsManager implementation
@Override
public void emptyDatabase()
{
emptyTable( "translation" );
emptyTable( "importobject" );
emptyTable( "importdatavalue" );
emptyTable( "constant" );
emptyTable( "sqlview" );
emptyTable( "datavalue_audit" );
emptyTable( "datavalueaudit" );
emptyTable( "datavalue" );
emptyTable( "completedatasetregistration" );
emptyTable( "pushanalysisrecipientusergroups" );
emptyTable( "pushanalysis" );
emptyTable( "dashboarditem_users" );
emptyTable( "dashboarditem_resources" );
emptyTable( "dashboarditem_reports" );
emptyTable( "dashboard_items" );
emptyTable( "dashboarditem" );
emptyTable( "dashboardusergroupaccesses" );
emptyTable( "dashboard" );
emptyTable( "interpretation_comments" );
emptyTable( "interpretationcommenttranslations" );
emptyTable( "interpretationcomment" );
emptyTable( "interpretationtranslations" );
emptyTable( "interpretationusergroupaccesses" );
emptyTable( "interpretation" );
emptyTable( "delete from reportusergroupaccesses" );
emptyTable( "delete from report" );
emptyTable( "reporttable_categorydimensions" );
emptyTable( "reporttable_categoryoptiongroups" );
emptyTable( "reporttable_columns" );
emptyTable( "reporttable_datadimensionitems" );
emptyTable( "reporttable_dataelementgroups" );
emptyTable( "reporttable_filters" );
emptyTable( "reporttable_itemorgunitgroups" );
emptyTable( "reporttable_organisationunits" );
emptyTable( "reporttable_orgunitgroups" );
emptyTable( "reporttable_orgunitlevels" );
emptyTable( "reporttable_periods" );
emptyTable( "reporttable_rows" );
emptyTable( "reporttableusergroupaccesses" );
emptyTable( "reporttabletranslations" );
emptyTable( "reporttable" );
emptyTable( "chart_categorydimensions" );
emptyTable( "chart_categoryoptiongroups" );
emptyTable( "chart_datadimensionitems" );
emptyTable( "chart_dataelementgroups" );
emptyTable( "chart_filters" );
emptyTable( "chart_itemorgunitgroups" );
emptyTable( "chart_organisationunits" );
emptyTable( "chart_orgunitgroups" );
emptyTable( "chart_orgunitlevels" );
emptyTable( "chart_periods" );
emptyTable( "chartusergroupaccesses" );
emptyTable( "charttranslations" );
emptyTable( "chart" );
emptyTable( "eventreport_attributedimensions" );
emptyTable( "eventreport_columns" );
emptyTable( "eventreport_dataelementdimensions" );
emptyTable( "eventreport_filters" );
emptyTable( "eventreport_itemorgunitgroups" );
emptyTable( "eventreport_organisationunits" );
emptyTable( "eventreport_orgunitgroups" );
emptyTable( "eventreport_orgunitlevels" );
emptyTable( "eventreport_periods" );
emptyTable( "eventreport_programindicatordimensions" );
emptyTable( "eventreport_rows" );
emptyTable( "eventreportusergroupaccesses" );
emptyTable( "eventreporttranslations" );
emptyTable( "eventreport" );
emptyTable( "eventchart_attributedimensions" );
emptyTable( "eventchart_columns" );
emptyTable( "eventchart_dataelementdimensions" );
emptyTable( "eventchart_filters" );
emptyTable( "eventchart_itemorgunitgroups" );
emptyTable( "eventchart_organisationunits" );
emptyTable( "eventchart_orgunitgroups" );
emptyTable( "eventchart_orgunitlevels" );
emptyTable( "eventchart_periods" );
emptyTable( "eventchart_programindicatordimensions" );
emptyTable( "eventchart_rows" );
emptyTable( "eventchartusergroupaccesses" );
emptyTable( "eventcharttranslations" );
emptyTable( "eventchart" );
emptyTable( "users_catdimensionconstraints" );
emptyTable( "userrolemembers" );
emptyTable( "userroledataset" );
emptyTable( "userroleauthorities" );
emptyTable( "userdatavieworgunits" );
emptyTable( "usermembership" );
emptyTable( "userrole" );
emptyTable( "orgunitgroupsetmembers" );
emptyTable( "orgunitgroupset" );
emptyTable( "orgunitgroupsetusergroupaccesses" );
emptyTable( "orgunitgroupmembers" );
emptyTable( "orgunitgroup" );
emptyTable( "orgunitgroupusergroupaccesses" );
emptyTable( "validationrulegroupusergroupstoalert" );
emptyTable( "validationrulegroupmembers" );
emptyTable( "validationrulegroup" );
emptyTable( "validationrulegroupusergroupaccesses" );
emptyTable( "validationresult" );
emptyTable( "validationrule" );
emptyTable( "validationruleusergroupaccesses" );
emptyTable( "dataapproval" );
emptyTable( "lockexception" );
emptyTable( "datasetsource" );
emptyTable( "datasetelement" );
emptyTable( "datasetindicators" );
emptyTable( "datasetoperands" );
emptyTable( "datasetusergroupaccesses" );
emptyTable( "dataset" );
emptyTable( "dataapprovalworkflowlevels" );
emptyTable( "dataapprovalworkflow" );
emptyTable( "dataapprovallevel" );
emptyTable( "predictororgunitlevels" );
emptyTable( "predictor" );
emptyTable( "trackedentitydatavalue" );
emptyTable( "programstageinstance" );
emptyTable( "programinstance" );
emptyTable( "programstage_dataelements" );
emptyTable( "programstage" );
emptyTable( "program_organisationunits" );
emptyTable( "programusergroupaccesses" );
emptyTable( "program" );
emptyTable( "trackedentityinstance" );
emptyTable( "minmaxdataelement" );
emptyTable( "expressiondataelement" );
emptyTable( "expressionsampleelement" );
emptyTable( "expressionoptioncombo" );
emptyTable( "calculateddataelement" );
emptyTable( "dataelementgroupsetmembers" );
emptyTable( "dataelementgroupsetusergroupaccesses" );
emptyTable( "dataelementgroupset" );
emptyTable( "dataelementgroupmembers" );
emptyTable( "dataelementgroupusergroupaccesses" );
emptyTable( "dataelementgroup" );
emptyTable( "dataelementaggregationlevels" );
emptyTable( "dataelementoperand" );
emptyTable( "dataelementusergroupaccesses" );
emptyTable( "dataelement" );
emptyTable( "categoryoptioncombos_categoryoptions" );
emptyTable( "categorycombos_optioncombos" );
emptyTable( "categorycombos_categories" );
emptyTable( "categories_categoryoptions" );
emptyTable( "categoryoption_organisationunits" );
emptyTable( "organisationunit" );
emptyTable( "orgunitlevel" );
emptyTable( "version" );
emptyTable( "deletedobject" );
emptyTable( "mocksource" );
emptyTable( "period" );
emptyTable( "indicatorgroupsetmembers" );
emptyTable( "indicatorgroupsetusergroupaccesses" );
emptyTable( "indicatorgroupset" );
emptyTable( "indicatorgroupmembers" );
emptyTable( "indicatorgroupusergroupaccesses" );
emptyTable( "indicatorgroup" );
emptyTable( "indicator" );
emptyTable( "indicatortype" );
emptyTable( "categoryoptiongroupsetmembers" );
emptyTable( "categoryoptiongroupsetusergroupaccesses" );
emptyTable( "categoryoptiongroupset" );
emptyTable( "categoryoptiongroupmembers" );
emptyTable( "categoryoptiongroupusergroupaccesses" );
emptyTable( "categoryoptiongroup" );
emptyTable( "dataelementcategoryoptionusergroupaccesses" );
emptyTable( "expression" );
emptyTable( "categoryoptioncombo" );
emptyTable( "categorycombo" );
emptyTable( "dataelementcategory" );
emptyTable( "dataelementcategoryoption" );
emptyTable( "optionvalue" );
emptyTable( "optionset" );
emptyTable( "systemsetting" );
emptyTable( "usergroupusergroupaccesses" );
emptyTable( "usergroupaccess" );
emptyTable( "usergroupmembers" );
emptyTable( "usergroup" );
emptyTable( "users" );
emptyTable( "userinfo" );
dropTable( "_orgunitstructure" );
dropTable( "_datasetorganisationunitcategory" );
dropTable( "_categoryoptioncomboname" );
dropTable( "_dataelementgroupsetstructure" );
dropTable( "_indicatorgroupsetstructure" );
dropTable( "_organisationunitgroupsetstructure" );
dropTable( "_categorystructure" );
dropTable( "_dataelementstructure" );
dropTable( "_dateperiodstructure" );
dropTable( "_periodstructure" );
dropTable( "_dataelementcategoryoptioncombo" );
dropTable( "_dataapprovalminlevel" );
log.debug( "Cleared database contents" );
cacheManager.clearCache();
log.debug( "Cleared Hibernate cache" );
}
@Override
public void clearSession()
{
sessionFactory.getCurrentSession().flush();
sessionFactory.getCurrentSession().clear();
}
@Override
public void emptyTable( String table )
{
try
{
jdbcTemplate.update( "delete from " + table );
}
catch ( BadSqlGrammarException ex )
{
log.debug( "Table " + table + " does not exist" );
}
}
@Override
public boolean tableExists( String tableName )
{
final String sql =
"select table_name from information_schema.tables " +
"where table_name = '" + tableName + "' " +
"and table_type = 'BASE TABLE'";
List<Object> tables = jdbcTemplate.queryForList( sql, Object.class );
return tables != null && tables.size() > 0;
}
// Supportive methods
private void dropTable( String table )
{
try
{
jdbcTemplate.execute( "drop table " + table );
}
catch ( BadSqlGrammarException ex )
{
log.debug( "Table " + table + " does not exist" );
}
}
} |
package org.hisp.dhis.webapi.controller.mapping;
import org.hisp.dhis.common.DimensionService;
import org.hisp.dhis.common.cache.CacheStrategy;
import org.hisp.dhis.dxf2.metadata.MetadataImportParams;
import org.hisp.dhis.dxf2.webmessage.WebMessageException;
import org.hisp.dhis.dxf2.webmessage.WebMessageUtils;
import org.hisp.dhis.i18n.I18nFormat;
import org.hisp.dhis.i18n.I18nManager;
import org.hisp.dhis.legend.LegendService;
import org.hisp.dhis.mapgeneration.MapGenerationService;
import org.hisp.dhis.mapping.Map;
import org.hisp.dhis.mapping.MapView;
import org.hisp.dhis.mapping.MappingService;
import org.hisp.dhis.organisationunit.OrganisationUnit;
import org.hisp.dhis.organisationunit.OrganisationUnitGroupService;
import org.hisp.dhis.organisationunit.OrganisationUnitService;
import org.hisp.dhis.period.Period;
import org.hisp.dhis.program.ProgramService;
import org.hisp.dhis.program.ProgramStageService;
import org.hisp.dhis.schema.descriptors.MapSchemaDescriptor;
import org.hisp.dhis.user.UserService;
import org.hisp.dhis.webapi.controller.AbstractCrudController;
import org.hisp.dhis.webapi.utils.ContextUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseStatus;
import javax.imageio.ImageIO;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.awt.image.BufferedImage;
import java.io.IOException;
import java.util.Date;
import java.util.Set;
import static org.hisp.dhis.common.DimensionalObjectUtils.getDimensions;
/**
* @author Morten Olav Hansen <mortenoh@gmail.com>
* @author Lars Helge Overland
*/
@Controller
@RequestMapping( value = MapSchemaDescriptor.API_ENDPOINT )
public class MapController
    extends AbstractCrudController<Map>
{
    // Minimum accepted dimensions for rendered map images; requests below
    // these limits are rejected with a conflict message.
    private static final int MAP_MIN_WIDTH = 140;
    private static final int MAP_MIN_HEIGHT = 25;

    @Autowired
    private MappingService mappingService;

    @Autowired
    private LegendService legendService;

    @Autowired
    private OrganisationUnitService organisationUnitService;

    @Autowired
    private OrganisationUnitGroupService organisationUnitGroupService;

    @Autowired
    private ProgramService programService;

    @Autowired
    private ProgramStageService programStageService;

    @Autowired
    private I18nManager i18nManager;

    @Autowired
    private MapGenerationService mapGenerationService;

    @Autowired
    private DimensionService dimensionService;

    @Autowired
    private UserService userService;

    @Autowired
    private ContextUtils contextUtils;

    //--------------------------------------------------------------------------
    // CRUD
    //--------------------------------------------------------------------------

    /**
     * Creates a map from the JSON request body. Translations are cleared
     * before persisting, and a Location header pointing at the new resource
     * is added to the response.
     */
    @Override
    @RequestMapping( method = RequestMethod.POST, consumes = "application/json" )
    @ResponseStatus( HttpStatus.CREATED )
    public void postJsonObject( HttpServletRequest request, HttpServletResponse response ) throws Exception
    {
        Map map = deserializeJsonEntity( request, response );
        map.getTranslations().clear();
        mappingService.addMap( map );
        response.addHeader( "Location", MapSchemaDescriptor.API_ENDPOINT + "/" + map.getUid() );
        webMessageService.send( WebMessageUtils.created( "Map created" ), response, request );
    }

    /**
     * Replaces the map identified by {@code uid} with the JSON request body,
     * merging according to the merge mode from the request parameters.
     *
     * @throws WebMessageException (not found) if no map exists for the uid.
     */
    @Override
    @RequestMapping( value = "/{uid}", method = RequestMethod.PUT, consumes = "application/json" )
    @ResponseStatus( HttpStatus.NO_CONTENT )
    public void putJsonObject( @PathVariable String uid, HttpServletRequest request, HttpServletResponse response ) throws Exception
    {
        Map map = mappingService.getMap( uid );
        if ( map == null )
        {
            throw new WebMessageException( WebMessageUtils.notFound( "Map does not exist: " + uid ) );
        }
        MetadataImportParams params = importService.getParamsFromMap( contextService.getParameterValuesMap() );
        Map newMap = deserializeJsonEntity( request, response );
        map.mergeWith( newMap, params.getMergeMode() );
        // Preserve the original uid regardless of what the payload contained.
        map.setUid( uid );
        mappingService.updateMap( map );
    }

    /**
     * Before an update, drops the existing map views (they are replaced by the
     * incoming ones) and clears the owner when the new map has none.
     */
    @Override
    protected void preUpdateEntity( Map map, Map newMap )
    {
        map.getMapViews().clear();
        if ( newMap.getUser() == null )
        {
            map.setUser( null );
        }
    }

    /**
     * Deserializes a map from the request and resolves its referenced
     * objects (user, legend set, group set, program, ...) to persistent
     * instances via {@link #mergeMap(Map)}.
     */
    @Override
    protected Map deserializeJsonEntity( HttpServletRequest request, HttpServletResponse response ) throws IOException
    {
        Map map = super.deserializeJsonEntity( request, response );
        mergeMap( map );
        return map;
    }

    //--------------------------------------------------------------------------
    // Get data
    //--------------------------------------------------------------------------

    /**
     * Renders the map identified by {@code uid} as a PNG image.
     *
     * @param uid the map uid.
     * @param date optional date for the map data.
     * @param ou optional organisation unit uid to render for.
     * @param width optional image width, must be at least MAP_MIN_WIDTH.
     * @param height optional image height, must be at least MAP_MIN_HEIGHT.
     * @param attachment whether to serve the image as a download attachment.
     * @throws WebMessageException if the map is missing or dimensions are too small.
     */
    @RequestMapping( value = { "/{uid}/data", "/{uid}/data.png" }, method = RequestMethod.GET )
    public void getMapData( @PathVariable String uid,
        @RequestParam( value = "date", required = false ) Date date,
        @RequestParam( value = "ou", required = false ) String ou,
        @RequestParam( required = false ) Integer width,
        @RequestParam( required = false ) Integer height,
        @RequestParam( value = "attachment", required = false ) boolean attachment,
        HttpServletResponse response ) throws Exception
    {
        // Fetched without ACL check so the image endpoint works for shared links.
        Map map = mappingService.getMapNoAcl( uid );
        if ( map == null )
        {
            throw new WebMessageException( WebMessageUtils.notFound( "Map does not exist: " + uid ) );
        }
        if ( width != null && width < MAP_MIN_WIDTH )
        {
            throw new WebMessageException( WebMessageUtils.conflict( "Min map width is " + MAP_MIN_WIDTH + ": " + width ) );
        }
        if ( height != null && height < MAP_MIN_HEIGHT )
        {
            throw new WebMessageException( WebMessageUtils.conflict( "Min map height is " + MAP_MIN_HEIGHT + ": " + height ) );
        }
        OrganisationUnit unit = ou != null ? organisationUnitService.getOrganisationUnit( ou ) : null;
        renderMapViewPng( map, date, unit, width, height, attachment, response );
    }

    //--------------------------------------------------------------------------
    // Hooks
    //--------------------------------------------------------------------------

    /**
     * After fetching a map, populates transient presentation properties on each
     * view: analytical properties, the org-unit parent graph relative to the
     * current user's data view roots, and formatted period names.
     */
    @Override
    public void postProcessEntity( Map map ) throws Exception
    {
        I18nFormat format = i18nManager.getI18nFormat();
        Set<OrganisationUnit> roots = currentUserService.getCurrentUser().getDataViewOrganisationUnitsWithFallback();
        for ( MapView view : map.getMapViews() )
        {
            view.populateAnalyticalProperties();
            for ( OrganisationUnit organisationUnit : view.getOrganisationUnits() )
            {
                view.getParentGraphMap().put( organisationUnit.getUid(), organisationUnit.getParentGraph( roots ) );
            }
            if ( view.getPeriods() != null && !view.getPeriods().isEmpty() )
            {
                for ( Period period : view.getPeriods() )
                {
                    period.setName( format.formatPeriod( period ) );
                }
            }
        }
    }

    //--------------------------------------------------------------------------
    // Supportive methods
    //--------------------------------------------------------------------------

    /**
     * Resolves the map's user reference to a persistent user (falling back to
     * the current user when absent) and merges each of its views.
     */
    private void mergeMap( Map map )
    {
        if ( map.getUser() != null )
        {
            map.setUser( userService.getUser( map.getUser().getUid() ) );
        }
        else
        {
            map.setUser( currentUserService.getCurrentUser() );
        }
        map.getMapViews().forEach( this::mergeMapView );
    }

    /**
     * Resolves all object references held by a view (legend set, org-unit
     * group set, program, program stage) to persistent instances and rebuilds
     * its column dimensions from the analytical columns.
     */
    private void mergeMapView( MapView view )
    {
        dimensionService.mergeAnalyticalObject( view );
        dimensionService.mergeEventAnalyticalObject( view );
        view.getColumnDimensions().clear();
        view.getColumnDimensions().addAll( getDimensions( view.getColumns() ) );
        if ( view.getLegendSet() != null )
        {
            view.setLegendSet( legendService.getLegendSet( view.getLegendSet().getUid() ) );
        }
        if ( view.getOrganisationUnitGroupSet() != null )
        {
            view.setOrganisationUnitGroupSet( organisationUnitGroupService.getOrganisationUnitGroupSet( view.getOrganisationUnitGroupSet().getUid() ) );
        }
        if ( view.getProgram() != null )
        {
            view.setProgram( programService.getProgram( view.getProgram().getUid() ) );
        }
        if ( view.getProgramStage() != null )
        {
            view.setProgramStage( programStageService.getProgramStage( view.getProgramStage().getUid() ) );
        }
    }

    /**
     * Generates a PNG for the map and writes it to the response, or sets
     * 204 No Content when no image could be produced.
     */
    private void renderMapViewPng( Map map, Date date, OrganisationUnit unit, Integer width, Integer height, boolean attachment, HttpServletResponse response )
        throws Exception
    {
        BufferedImage image = mapGenerationService.generateMapImage( map, date, unit, width, height );
        if ( image != null )
        {
            contextUtils.configureResponse( response, ContextUtils.CONTENT_TYPE_PNG, CacheStrategy.RESPECT_SYSTEM_SETTING, "map.png", attachment );
            ImageIO.write( image, "PNG", response.getOutputStream() );
        }
        else
        {
            response.setStatus( HttpServletResponse.SC_NO_CONTENT );
        }
    }
}
package com.mercadopago.model;
import com.mercadopago.util.ApiUtil;
import java.util.List;
public class ApiException {
private List<Cause> cause;
private String error;
private String message;
private Integer status;
public ApiException() {
}
public ApiException(String message, Integer status, String error, List<Cause> cause) {
this.message = message;
this.status = status;
this.error = error;
this.cause = cause;
}
public List<Cause> getCause() {
return cause;
}
public void setCause(List<Cause> cause) {
this.cause = cause;
}
public String getError() {
return error;
}
public void setError(String error) {
this.error = error;
}
public String getMessage() {
return message;
}
public void setMessage(String message) {
this.message = message;
}
public Integer getStatus() {
return status;
}
public void setStatus(Integer status) {
this.status = status;
}
public boolean isRecoverable() {
return getStatus() == null || getStatus() != ApiUtil.StatusCodes.NOT_FOUND
&& (getCause() == null || getCause().isEmpty());
}
public class ErrorCodes {
public static final String CUSTOMER_NOT_ALLOWED_TO_OPERATE = "2021";
public static final String COLLECTOR_NOT_ALLOWED_TO_OPERATE = "2022";
public static final String INVALID_USERS_INVOLVED = "2035";
public static final String CUSTOMER_EQUAL_TO_COLLECTOR = "3000";
public static final String INVALID_CARD_HOLDER_NAME = "3001";
public static final String UNAUTHORIZED_CLIENT = "3010";
public static final String PAYMENT_METHOD_NOT_FOUND = "3012";
public static final String INVALID_SECURITY_CODE = "3013";
public static final String SECURITY_CODE_REQUIRED = "3014";
public static final String INVALID_PAYMENT_METHOD = "3015";
public static final String INVALID_CARD_NUMBER = "3017";
public static final String EMPTY_EXPIRATION_MONTH = "3019";
public static final String EMPTY_EXPIRATION_YEAR = "3020";
public static final String EMPTY_CARD_HOLDER_NAME = "3021";
public static final String EMPTY_DOCUMENT_NUMBER = "3022";
public static final String EMPTY_DOCUMENT_TYPE = "3023";
public static final String INVALID_PAYMENT_TYPE_ID = "3028";
public static final String INVALID_PAYMENT_METHOD_ID = "3029";
public static final String INVALID_CARD_EXPIRATION_MONTH = "3030";
public static final String INVALID_CARD_EXPIRATION_YEAR = "4000";
public static final String INVALID_PAYER_EMAIL = "4050";
}
} |
package org.estatio.module.application.canonical.v1;
import org.apache.isis.applib.annotation.DomainService;
import org.apache.isis.applib.annotation.NatureOfService;
import org.incode.module.communications.dom.impl.commchannel.PostalAddress;
import org.incode.module.country.dom.impl.Country;
import org.incode.module.country.dom.impl.State;
import org.estatio.canonical.communicationchannel.v1.PostalAddressDto;
import org.estatio.module.base.platform.applib.DtoFactoryAbstract;
@DomainService(
nature = NatureOfService.DOMAIN
)
public class PostalAddressDtoFactory extends DtoFactoryAbstract<PostalAddress, PostalAddressDto> {
public PostalAddressDtoFactory() {
super(PostalAddress.class, PostalAddressDto.class);
}
public PostalAddressDto newDto(final PostalAddress postalAddress) {
final PostalAddressDto dto = new PostalAddressDto();
dto.setSelf(mappingHelper.oidDtoFor(postalAddress));
dto.setAtPath(postalAddress.getAtPath());
dto.setAddress1(postalAddress.getAddress1());
dto.setAddress2(postalAddress.getAddress2());
dto.setAddress3(postalAddress.getAddress3());
dto.setCity(postalAddress.getCity());
final State postalAddressState = postalAddress.getState();
if (postalAddressState != null) {
dto.setStateReference(postalAddressState.getReference());
dto.setStateName(postalAddressState.getName());
}
final Country postalAddressCountry = postalAddress.getCountry();
if(postalAddressCountry != null) {
dto.setCountryReference(postalAddressCountry.getReference());
dto.setCountryAlpha2Code(postalAddressCountry.getAlpha2Code());
dto.setCountryName(postalAddressCountry.getName());
}
return dto;
}
} |
package fitnesse.testsystems.slim;
import fitnesse.slim.SlimError;
import fitnesse.slim.SlimException;
import fitnesse.slim.instructions.*;
import fitnesse.slim.protocol.SlimDeserializer;
import fitnesse.slim.protocol.SlimSerializer;
import fitnesse.testsystems.CommandRunner;
import fitnesse.testsystems.CommandRunnerExecutionLog;
import fitnesse.testsystems.ExecutionLog;
import util.ListUtility;
import util.StreamReader;
import java.io.BufferedWriter;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.net.Socket;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static util.ListUtility.list;
public class SlimCommandRunningClient implements SlimClient {
public static final int NO_SLIM_SERVER_CONNECTION_FLAG = -32000;
public static double MINIMUM_REQUIRED_SLIM_VERSION = 0.3;
private final CommandRunner slimRunner;
private Socket client;
private StreamReader reader;
private BufferedWriter writer;
private String slimServerVersionMessage;
private double slimServerVersion;
private String hostName;
private int port;
public SlimCommandRunningClient(CommandRunner slimRunner, String hostName, int port) {
this.slimRunner = slimRunner;
this.port = port;
this.hostName = hostName;
}
@Override
public void start() throws IOException {
slimRunner.asynchronousStart();
waitUntilStarted();
checkForVersionMismatch();
}
void waitUntilStarted() {
while (!isStarted())
try {
Thread.sleep(50);
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
}
private boolean isStarted() {
try {
connect();
return true;
} catch (Exception e) {
return false;
}
}
private void checkForVersionMismatch() {
double serverVersionNumber = getServerVersion();
if (serverVersionNumber == NO_SLIM_SERVER_CONNECTION_FLAG) {
throw new SlimError("Slim Protocol Version Error: Server did not respond with a valid version number.");
}
else if (serverVersionNumber < MINIMUM_REQUIRED_SLIM_VERSION) {
throw new SlimError(String.format("Slim Protocol Version Error: Expected V%s but was V%s", MINIMUM_REQUIRED_SLIM_VERSION, serverVersionNumber));
}
}
@Override
public void kill() throws IOException {
if (slimRunner != null)
slimRunner.kill();
reader.close();
writer.close();
client.close();
}
@Override
public void connect() throws IOException {
for (int tries = 0; tryConnect() == false; tries++) {
if (tries > 100)
throw new SlimError("Could not build Slim.");
try {
Thread.sleep(50);
} catch (InterruptedException e) {
throw new SlimError("Wait for connection interrupted.");
}
}
reader = new StreamReader(client.getInputStream());
writer = new BufferedWriter(new OutputStreamWriter(client.getOutputStream(), "UTF-8"));
slimServerVersionMessage = reader.readLine();
validateConnection();
}
private void validateConnection() {
if (isConnected()) {
slimServerVersion = Double.parseDouble(slimServerVersionMessage.replace("Slim
}
else {
slimServerVersion = NO_SLIM_SERVER_CONNECTION_FLAG;
System.out.println("Error reading Slim Version. Read the following: " + slimServerVersionMessage);
}
}
private boolean tryConnect() {
try {
client = new Socket(hostName, port);
return true;
} catch (IOException e) {
return false;
}
}
public double getServerVersion() {
return slimServerVersion;
}
public boolean isConnected() {
return slimServerVersionMessage.startsWith("Slim
}
@Override
public Map<String, Object> invokeAndGetResponse(List<Instruction> statements) throws IOException {
if (statements.size() == 0)
return new HashMap<String, Object>();
String instructions = SlimSerializer.serialize(toList(statements));
writeString(instructions);
int resultLength = getLengthToRead();
String results = reader.read(resultLength);
// resultList is a list: [tag, resultValue]
List<Object> resultList = SlimDeserializer.deserialize(results);
return resultToMap(resultList);
}
@Override
public ExecutionLog getExecutionLog() {
return new CommandRunnerExecutionLog(slimRunner);
}
private interface ToListExecutor extends InstructionExecutor {
}
private List<Object> toList(List<Instruction> instructions) {
final List<Object> statementsAsList = new ArrayList<Object>(instructions.size());
for (final Instruction instruction: instructions) {
ToListExecutor executor = new ToListExecutor() {
@Override
public void addPath(String path) throws SlimException {
statementsAsList.add(list(instruction.getId(), ImportInstruction.INSTRUCTION, path));
}
@Override
public Object callAndAssign(String symbolName, String instanceName, String methodsName, Object... arguments) throws SlimException {
List<Object> list = ListUtility.list((Object) instruction.getId(), CallAndAssignInstruction.INSTRUCTION, symbolName, instanceName, methodsName);
addArguments(list, arguments);
statementsAsList.add(list);
return null;
}
@Override
public Object call(String instanceName, String methodName, Object... arguments) throws SlimException {
List<Object> list = ListUtility.list((Object) instruction.getId(), CallInstruction.INSTRUCTION, instanceName, methodName);
addArguments(list, arguments);
statementsAsList.add(list);
return null;
}
@Override
public void create(String instanceName, String className, Object... constructorArgs) throws SlimException {
List<Object> list = ListUtility.list((Object) instruction.getId(), MakeInstruction.INSTRUCTION, instanceName, className);
addArguments(list, constructorArgs);
statementsAsList.add(list);
}
};
instruction.execute(executor);
}
return statementsAsList;
}
private static void addArguments(List<Object> list, Object[] arguments) {
for (Object arg: arguments) {
list.add(arg);
}
}
private int getLengthToRead() throws IOException {
String resultLength = reader.read(6);
reader.read(1);
int length = 0;
try {
length = Integer.parseInt(resultLength);
}
catch (NumberFormatException e){
throw new RuntimeException("Steam Read Failure. Can't read length of message from the server. Possibly test aborted. Last thing read: " + resultLength);
}
return length;
}
protected void writeString(String string) throws IOException {
String packet = String.format("%06d:%s", string.getBytes("UTF-8").length, string);
writer.write(packet);
writer.flush();
}
@Override
public void bye() throws IOException {
writeString("bye");
slimRunner.join();
kill();
}
public static Map<String, Object> resultToMap(List<? extends Object> slimResults) {
Map<String, Object> map = new HashMap<String, Object>();
for (Object aResult : slimResults) {
List<Object> resultList = ListUtility.uncheckedCast(Object.class, aResult);
map.put((String) resultList.get(0), resultList.get(1));
}
return map;
}
} |
package de.charite.compbio.exomiser.core.factories;
import htsjdk.variant.variantcontext.VariantContext;
import java.util.ArrayList;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import de.charite.compbio.exomiser.core.model.Variant;
import de.charite.compbio.exomiser.core.model.VariantEvaluation;
import de.charite.compbio.jannovar.annotation.VariantAnnotations;
import de.charite.compbio.jannovar.htsjdk.InvalidCoordinatesException;
import de.charite.compbio.jannovar.htsjdk.VariantContextAnnotator;
import de.charite.compbio.jannovar.data.JannovarData;
import de.charite.compbio.jannovar.reference.GenomeVariant;
import java.util.Collections;
/**
* Given a {@link VariantAnnotationsFactory}, build a {@link Variant} for each
* alternative allele.
*
* Uses the {@link VariantContextAnnotator} class of the Jannovar-HTSJDK bridge.
*
* @author Jules Jacobsen <jules.jacobsen@sanger.ac.uk>
*/
public class VariantAnnotationsFactory {
private static final Logger logger = LoggerFactory.getLogger(VariantAnnotationsFactory.class);
/**
* tool for obtaining annotations for the {@link VariantContext} objects
*/
private final VariantContextAnnotator annotator;
public VariantAnnotationsFactory(JannovarData jannovarData) {
this.annotator = new VariantContextAnnotator(jannovarData.getRefDict(), jannovarData.getChromosomes());
}
/**
* Returns a list of variants of known reference. If a VariantContext has no
* know reference on the genome an empty list will be returned.
*
* @param variantContext {@link VariantContext} to get {@link Variant} objects for
* @return one {@link Variant} object for each alternative allele in vc.
*/
public List<VariantAnnotations> buildVariantAnnotations(VariantContext variantContext) {
return buildAlleleAnnotations(variantContext);
}
private List<VariantAnnotations> buildAlleleAnnotations(VariantContext variantContext) {
try {
//builds one annotation list for each alternative allele
return annotator.buildAnnotations(variantContext);
} catch (InvalidCoordinatesException ex) {
//not all genes can be assigned to a chromosome, so these will fail here.
//TODO: how to report these? They will not be used in the analysis.
logger.warn("Cannot build annotations for VariantContext {} {} {} - coordinates are invalid: {}", variantContext.getChr(), variantContext.getStart(), variantContext.getAlleles(), ex);
return Collections.emptyList();
}
}
} |
package org.fedorahosted.flies.webtrans.client.editor.table;
import static org.fedorahosted.flies.webtrans.client.editor.table.TableConstants.MAX_PAGE_ROW;
import java.util.ArrayList;
import java.util.List;
import net.customware.gwt.dispatch.client.DispatchAsync;
import net.customware.gwt.presenter.client.EventBus;
import net.customware.gwt.presenter.client.place.Place;
import net.customware.gwt.presenter.client.place.PlaceRequest;
import net.customware.gwt.presenter.client.widget.WidgetDisplay;
import org.fedorahosted.flies.common.ContentState;
import org.fedorahosted.flies.common.EditState;
import org.fedorahosted.flies.webtrans.client.editor.DocumentEditorPresenter;
import org.fedorahosted.flies.webtrans.client.editor.HasPageNavigation;
import org.fedorahosted.flies.webtrans.client.editor.filter.ContentFilter;
import org.fedorahosted.flies.webtrans.client.editor.filter.FilterDisabledEvent;
import org.fedorahosted.flies.webtrans.client.editor.filter.FilterDisabledEventHandler;
import org.fedorahosted.flies.webtrans.client.editor.filter.FilterEnabledEvent;
import org.fedorahosted.flies.webtrans.client.editor.filter.FilterEnabledEventHandler;
import org.fedorahosted.flies.webtrans.client.events.DocumentSelectionEvent;
import org.fedorahosted.flies.webtrans.client.events.DocumentSelectionHandler;
import org.fedorahosted.flies.webtrans.client.events.NavTransUnitEvent;
import org.fedorahosted.flies.webtrans.client.events.NavTransUnitHandler;
import org.fedorahosted.flies.webtrans.client.events.NotificationEvent;
import org.fedorahosted.flies.webtrans.client.events.TransMemoryCopyEvent;
import org.fedorahosted.flies.webtrans.client.events.TransMemoryCopyHandler;
import org.fedorahosted.flies.webtrans.client.events.TransUnitEditEvent;
import org.fedorahosted.flies.webtrans.client.events.TransUnitEditEventHandler;
import org.fedorahosted.flies.webtrans.client.events.TransUnitSelectionEvent;
import org.fedorahosted.flies.webtrans.client.events.TransUnitUpdatedEvent;
import org.fedorahosted.flies.webtrans.client.events.TransUnitUpdatedEventHandler;
import org.fedorahosted.flies.webtrans.client.events.NotificationEvent.Severity;
import org.fedorahosted.flies.webtrans.client.rpc.CachingDispatchAsync;
import org.fedorahosted.flies.webtrans.shared.auth.AuthenticationError;
import org.fedorahosted.flies.webtrans.shared.auth.AuthorizationError;
import org.fedorahosted.flies.webtrans.shared.auth.Identity;
import org.fedorahosted.flies.webtrans.shared.model.DocumentId;
import org.fedorahosted.flies.webtrans.shared.model.TransUnit;
import org.fedorahosted.flies.webtrans.shared.model.TransUnitId;
import org.fedorahosted.flies.webtrans.shared.rpc.EditingTranslationAction;
import org.fedorahosted.flies.webtrans.shared.rpc.EditingTranslationResult;
import org.fedorahosted.flies.webtrans.shared.rpc.GetTransUnits;
import org.fedorahosted.flies.webtrans.shared.rpc.GetTransUnitsResult;
import org.fedorahosted.flies.webtrans.shared.rpc.GetTransUnitsStates;
import org.fedorahosted.flies.webtrans.shared.rpc.GetTransUnitsStatesResult;
import org.fedorahosted.flies.webtrans.shared.rpc.UpdateTransUnit;
import org.fedorahosted.flies.webtrans.shared.rpc.UpdateTransUnitResult;
import com.allen_sauer.gwt.log.client.Log;
import com.google.gwt.dom.client.NativeEvent;
import com.google.gwt.event.dom.client.KeyCodes;
import com.google.gwt.event.logical.shared.HasSelectionHandlers;
import com.google.gwt.event.logical.shared.SelectionEvent;
import com.google.gwt.event.logical.shared.SelectionHandler;
import com.google.gwt.gen2.event.shared.HandlerRegistration;
import com.google.gwt.gen2.table.client.TableModel;
import com.google.gwt.gen2.table.client.TableModel.Callback;
import com.google.gwt.gen2.table.client.TableModelHelper.Request;
import com.google.gwt.gen2.table.client.TableModelHelper.SerializableResponse;
import com.google.gwt.gen2.table.event.client.HasPageChangeHandlers;
import com.google.gwt.gen2.table.event.client.HasPageCountChangeHandlers;
import com.google.gwt.gen2.table.event.client.PageChangeHandler;
import com.google.gwt.gen2.table.event.client.PageCountChangeHandler;
import com.google.gwt.user.client.Event;
import com.google.gwt.user.client.Event.NativePreviewEvent;
import com.google.gwt.user.client.Event.NativePreviewHandler;
import com.google.gwt.user.client.rpc.AsyncCallback;
import com.google.inject.Inject;
public class TableEditorPresenter extends DocumentEditorPresenter<TableEditorPresenter.Display>
implements HasPageNavigation, HasPageChangeHandlers, HasPageCountChangeHandlers {
/** Place token identifying this presenter in history/navigation. */
public static final Place PLACE = new Place("TableEditor");

/**
 * View contract for the table editor: exposes selection/paging handler
 * registration, the cached table model, row navigation and the inline
 * target-cell editor used to edit translations in place.
 */
public interface Display extends WidgetDisplay, HasPageNavigation {
    /** Source of row-selection events (fired when the user picks a TransUnit row). */
    HasSelectionHandlers<TransUnit> getSelectionHandlers();
    HasPageChangeHandlers getPageChangeHandlers();
    HasPageCountChangeHandlers getPageCountChangeHandlers();
    /** The caching table model backing the grid. */
    RedirectingCachedTableModel<TransUnit> getTableModel();
    // NOTE(review): parameter name "hadler" is a typo for "handler" in the original API.
    void setTableModelHandler(TableModelHandler<TransUnit> hadler);
    /** Re-fetches and re-renders the current page. */
    void reloadPage();
    void setPageSize(int size);
    void setContentFilter(ContentFilter<TransUnit> filter);
    void clearContentFilter();
    /** Moves selection to the given page-relative row. */
    void gotoRow(int row);
    /** Moves selection to the given page-relative row, optionally opening the editor. */
    void gotoRow(int row, boolean andEdit);
    int getCurrentPageNumber();
    /** Returns the TransUnit shown at the given page-relative row. */
    TransUnit getTransUnitValue(int row);
    InlineTargetCellEditor getTargetCellEditor();
    /** All TransUnits currently loaded for the visible page. */
    List<TransUnit> getRowValues();
    boolean isFirstPage();
    boolean isLastPage();
    int getCurrentPage();
    int getPageSize();
}
/** Document whose translation units are currently displayed; null until a document is selected. */
private DocumentId documentId;
private final DispatchAsync dispatcher;
private final Identity identity;
/** Row most recently selected by the user (or by programmatic navigation). */
private TransUnit selectedTransUnit;
//private int lastRowNum;
// Caches of absolute row indices for next/prev Fuzzy (NeedReview) and New
// navigation, filled lazily by GetTransUnitsStates requests and invalidated
// on selection changes and remote updates.
private List<Long> transIdNextFuzzyCache = new ArrayList<Long>();
private List<Long> transIdNextNewCache = new ArrayList<Long>();
private List<Long> transIdPrevFuzzyCache = new ArrayList<Long>();
private List<Long> transIdPrevNewCache = new ArrayList<Long>();
// Absolute (document-wide) index of the row the cursor is on.
private int curRowIndex;
// Zero-based page the cursor is on.
private int curPage;
/**
 * Creates the presenter.
 *
 * @param display    the table editor view
 * @param eventBus   application-wide event bus
 * @param dispatcher caching RPC dispatcher used for all server calls
 * @param identity   identity of the logged-in user (used to ignore our own edit events)
 */
@Inject
public TableEditorPresenter(final Display display, final EventBus eventBus, final CachingDispatchAsync dispatcher, final Identity identity) {
    super(display, eventBus);
    this.dispatcher = dispatcher;
    this.identity = identity;
}

/** Returns the navigation Place token for this presenter. */
@Override
public Place getPlace() {
    return PLACE;
}
/**
 * Wires the presenter to the view and the event bus: row selection, document
 * selection, content filtering, remote TransUnit updates/edits, keyboard
 * navigation events and a native key-preview handler for paging keys.
 */
@Override
protected void onBind() {
    display.setTableModelHandler(tableModelHandler);
    display.setPageSize(TableConstants.PAGE_SIZE);
    // Row selection: remember the selected unit and invalidate the
    // next/prev navigation caches, since they are relative to the cursor.
    registerHandler(display.getSelectionHandlers().addSelectionHandler(new SelectionHandler<TransUnit>() {
        @Override
        public void onSelection(SelectionEvent<TransUnit> event) {
            if (selectedTransUnit == null || !event.getSelectedItem().getId().equals(selectedTransUnit.getId())) {
                selectedTransUnit = event.getSelectedItem();
                Log.info("SelectedTransUnit " + selectedTransUnit.getId());
                // Clean the cache when we click the new entry
                if (!transIdNextNewCache.isEmpty())
                    transIdNextNewCache.clear();
                if (!transIdPrevNewCache.isEmpty())
                    transIdPrevNewCache.clear();
                if (!transIdNextFuzzyCache.isEmpty())
                    transIdNextFuzzyCache.clear();
                if (!transIdPrevFuzzyCache.isEmpty())
                    transIdPrevFuzzyCache.clear();
                eventBus.fireEvent(new TransUnitSelectionEvent(selectedTransUnit));
            }
        }
    }));
    // Document switch: reset the table model and jump to the first page.
    registerHandler(
        eventBus.addHandler(DocumentSelectionEvent.getType(), new DocumentSelectionHandler() {
            @Override
            public void onDocumentSelected(DocumentSelectionEvent event) {
                if (!event.getDocument().getId().equals(documentId)) {
                    display.startProcessing();
                    documentId = event.getDocument().getId();
                    display.getTableModel().clearCache();
                    display.getTableModel().setRowCount(TableModel.UNKNOWN_ROW_COUNT);
                    display.gotoPage(0, true);
                    display.stopProcessing();
                }
            }
        })
    );
    // Content filter on/off, driven by the filter panel.
    registerHandler(eventBus.addHandler(FilterEnabledEvent.getType(), new FilterEnabledEventHandler() {
        @Override
        public void onFilterEnabled(FilterEnabledEvent event) {
            display.setContentFilter(event.getContentFilter());
        }
    }));
    registerHandler(eventBus.addHandler(FilterDisabledEvent.getType(), new FilterDisabledEventHandler() {
        @Override
        public void onFilterDisabled(FilterDisabledEvent event) {
            display.clearContentFilter();
        }
    }));
    // A TransUnit in the current document was updated remotely: drop the
    // navigation caches and refresh the affected row in place if visible.
    registerHandler(eventBus.addHandler(TransUnitUpdatedEvent.getType(), new TransUnitUpdatedEventHandler() {
        @Override
        public void onTransUnitUpdated(TransUnitUpdatedEvent event) {
            if (documentId != null && documentId.equals(event.getDocumentId())) {
                // Clear the cache
                if (!transIdNextFuzzyCache.isEmpty())
                    transIdNextFuzzyCache.clear();
                if (!transIdNextNewCache.isEmpty())
                    transIdNextNewCache.clear();
                if (!transIdPrevFuzzyCache.isEmpty())
                    transIdPrevFuzzyCache.clear();
                if (!transIdPrevNewCache.isEmpty())
                    transIdPrevNewCache.clear();
                // TODO this test never succeeds
                if (selectedTransUnit != null && selectedTransUnit.getId().equals(event.getTransUnitId())) {
                    // handle change in current selection
                    //eventBus.fireEvent(new NotificationEvent(Severity.Warning, "Someone else updated this translation unit. you're in trouble..."));
                    //display.getTableModel().setRowValue(row, rowValue);
                    Log.info("selected TU updated; cancelling edit");
                    display.getTargetCellEditor().cancelEdit();
                    // TODO reload page and return
                }
                // NOTE(review): reloadPage is hard-coded to false, so the
                // full-reload branch below is dead code; only the in-place
                // row refresh in the else branch ever runs.
                boolean reloadPage = false;
                if (reloadPage) {
                    display.getTargetCellEditor().cancelEdit();
                    display.getTableModel().clearCache();
                    display.reloadPage();
                } else {
                    final Integer rowOffset = getRowOffset(event.getTransUnitId());
                    // - add TU index to model
                    if (rowOffset != null) {
                        // Absolute row index of the updated unit.
                        final int row = display.getCurrentPage() * display.getPageSize() + rowOffset;
                        Log.info("row calculated as " + row);
                        // Re-fetch just that one row and overwrite it in the model.
                        dispatcher.execute(new GetTransUnits(
                                documentId,
                                row,
                                1), new AsyncCallback<GetTransUnitsResult>() {
                            @Override
                            public void onFailure(Throwable e) {
                                Log.error(e.getMessage(), e);
                            }
                            @Override
                            public void onSuccess(GetTransUnitsResult result) {
                                // FIXME should this be row, rowOffset, or something else?
                                Log.info("TransUnit Id" + result.getUnits().get(0).getId());
                                display.getTableModel().setRowValueOverride(row, result.getUnits().get(0));
                            }
                        });
                    } else {
                        // Updated row is not on the current page: just invalidate the cache.
                        display.getTableModel().clearCache();
                    }
                }
            }
        }
    }));
    // Someone (possibly another user) started/stopped editing a unit in this
    // document: warn if it is the unit we have selected and it wasn't us.
    registerHandler(eventBus.addHandler(TransUnitEditEvent.getType(), new TransUnitEditEventHandler() {
        @Override
        public void onTransUnitEdit(TransUnitEditEvent event) {
            if (documentId != null && documentId.equals(event.getDocumentId())) {
                if (selectedTransUnit != null && selectedTransUnit.getId().equals(event.getTransUnitId())) {
                    // handle change in current selection
                    if (!event.getSessionId().equals(identity.getSessionId()))
                        eventBus.fireEvent(new NotificationEvent(Severity.Warning, "Warning: This Translation Unit is being edited by someone else."));
                }
                //display.getTableModel().clearCache();
                //display.reloadPage();
            }
        }
    }));
    // Keyboard navigation requests (next/prev row, next/prev fuzzy or new).
    registerHandler(eventBus.addHandler(NavTransUnitEvent.getType(), new NavTransUnitHandler() {
        @Override
        public void onNavTransUnit(NavTransUnitEvent event) {
            if (selectedTransUnit != null) {
                int step = event.getStep();
                Log.info("Step " + step);
                //Send message to server to stop editing current selection
                //stopEditing(selectedTransUnit);
                InlineTargetCellEditor editor = display.getTargetCellEditor();
                //If goto Next or Prev Trans Unit
                if (event.getRowType() == null) {
                    if (step > 0)
                        editor.handleNext();
                    else
                        editor.handlePrev();
                }
                //If goto Next or Prev Fuzzy/New Trans Unit
                if (event.getRowType() == ContentState.NeedReview ||
                        event.getRowType() == ContentState.New) {
                    if (step > 0)
                        editor.handleNextState(event.getRowType());
                    else
                        editor.handlePrevState(event.getRowType());
                }
            }
        }
    }));
    // Copy a translation-memory match into the open editor, if any.
    registerHandler(eventBus.addHandler(TransMemoryCopyEvent.getType(), new TransMemoryCopyHandler() {
        @Override
        public void onTransMemoryCopy(TransMemoryCopyEvent event) {
            // When user clicked on copy-to-target anchor, it checks
            // if user is editing any target. Notifies if not.
            if (display.getTargetCellEditor().isEditing()) {
                display.getTargetCellEditor().setText(event.getTargetResult());
                eventBus.fireEvent(new NotificationEvent(Severity.Info, "Message has been copied to the target."));
            }
            else
                eventBus.fireEvent(new NotificationEvent(Severity.Error, "Please open the target in the editor first."));
        }
    }));
    // Global key handler for PageUp/PageDown/Home/End paging.
    // NOTE(review): registered with Event.addNativePreviewHandler, not through
    // registerHandler, so it is never removed on unbind — verify this is intended.
    Event.addNativePreviewHandler(new NativePreviewHandler() {
        @Override
        public void onPreviewNativeEvent(NativePreviewEvent event) {
            //Only when the Table is showed and editor is closed, the keyboard event will be processed.
            if (display.asWidget().isVisible() && !display.getTargetCellEditor().isFocused()) {
                NativeEvent nativeEvent = event.getNativeEvent();
                String nativeEventType = nativeEvent.getType();
                int keyCode = nativeEvent.getKeyCode();
                boolean shiftKey = nativeEvent.getShiftKey();
                boolean altKey = nativeEvent.getAltKey();
                boolean ctrlKey = nativeEvent.getCtrlKey();
                if (nativeEventType.equals("keypress") && !shiftKey && !altKey && !ctrlKey) {
                    //PageDown key
                    switch (keyCode) {
                    case KeyCodes.KEY_PAGEDOWN:
                        Log.info("fired event of type " + event.getAssociatedType().getClass().getName());
                        if (!display.isLastPage())
                            gotoNextPage();
                        event.cancel();
                        break;
                    //PageUp key
                    case KeyCodes.KEY_PAGEUP:
                        Log.info("fired event of type " + event.getAssociatedType().getClass().getName());
                        if (!display.isFirstPage())
                            gotoPreviousPage();
                        event.cancel();
                        break;
                    //Home
                    case KeyCodes.KEY_HOME:
                        Log.info("fired event of type " + event.getAssociatedType().getClass().getName());
                        display.gotoFirstPage();
                        event.cancel();
                        break;
                    //End
                    case KeyCodes.KEY_END:
                        Log.info("fired event of type " + event.getAssociatedType().getClass().getName());
                        display.gotoLastPage();
                        event.cancel();
                        break;
                    default:
                        break;
                    }
                }
            }
        }
    });
    display.gotoFirstPage();
}
/**
 * Finds the page-relative row offset of the given TransUnit on the currently
 * loaded page, or {@code null} when the unit is not visible on this page.
 *
 * @param transUnitId id of the unit to locate
 * @return zero-based offset within the current page, or null if not found
 */
public Integer getRowOffset(TransUnitId transUnitId) {
    // TODO inefficient!
    final int visibleRows = display.getRowValues().size();
    int offset = 0;
    while (offset < visibleRows) {
        if (transUnitId.equals(display.getTransUnitValue(offset).getId())) {
            Log.info("getRowOffset returning " + offset);
            return offset;
        }
        offset++;
    }
    return null;
}
/**
 * Bridges the paging table widget to the server: fetches row ranges, persists
 * edited rows, and implements row/state navigation for the inline editor.
 *
 * Fix: gotoNextRow/gotoPrevRow previously hard-coded the page size as 50 when
 * converting (page, row) to an absolute index, while nextFuzzyIndex and
 * prevFuzzyIndex used TableConstants.PAGE_SIZE for the identical conversion.
 * All conversions now use TableConstants.PAGE_SIZE consistently.
 */
private final TableModelHandler<TransUnit> tableModelHandler = new TableModelHandler<TransUnit>() {
    /**
     * Loads {@code request.getNumRows()} rows starting at
     * {@code request.getStartRow()} from the server and hands them to the table.
     */
    @Override
    public void requestRows(final Request request, final Callback<TransUnit> callback) {
        int numRows = request.getNumRows();
        int startRow = request.getStartRow();
        Log.info("Table requesting" + numRows + " starting from " + startRow);
        if (documentId == null) {
            callback.onFailure(new RuntimeException("No DocumentId"));
            return;
        }
        dispatcher.execute(new GetTransUnits(documentId, startRow, numRows), new AsyncCallback<GetTransUnitsResult>() {
            @Override
            public void onSuccess(GetTransUnitsResult result) {
                SerializableResponse<TransUnit> response = new SerializableResponse<TransUnit>(
                        result.getUnits());
                Log.debug("Got " + result.getUnits().size() + " rows back");
                callback.onRowsReady(request, response);
                Log.info("Total of " + result.getTotalCount() + " rows available");
                display.getTableModel().setRowCount(result.getTotalCount());
            }
            @Override
            public void onFailure(Throwable caught) {
                // Map auth failures to user-facing notifications; anything
                // else is logged and reported generically.
                if (caught instanceof AuthenticationError) {
                    eventBus.fireEvent(new NotificationEvent(Severity.Error, "Not logged in!"));
                }
                else if (caught instanceof AuthorizationError) {
                    eventBus.fireEvent(new NotificationEvent(Severity.Error, "Failed to load data from Server"));
                }
                else {
                    Log.error("GetTransUnits failure " + caught, caught);
                    eventBus.fireEvent(new NotificationEvent(Severity.Error, "An unknown error occurred"));
                }
            }
        });
    }

    /**
     * Persists an edited row: saves the new target text/state, then tells the
     * server this client has stopped editing the unit.
     *
     * @return always true (the table applies the value optimistically)
     */
    @Override
    public boolean onSetRowValue(int row, TransUnit rowValue) {
        dispatcher.execute(
                new UpdateTransUnit(rowValue.getId(), rowValue.getTarget(), rowValue.getStatus()),
                new AsyncCallback<UpdateTransUnitResult>() {
                    @Override
                    public void onFailure(Throwable caught) {
                        Log.error("UpdateTransUnit failure " + caught, caught);
                        eventBus.fireEvent(new NotificationEvent(Severity.Error, "Failed to update Translation Unit"));
                    }
                    @Override
                    public void onSuccess(UpdateTransUnitResult result) {
                        eventBus.fireEvent(new NotificationEvent(Severity.Info, "Saved change to Translation Unit"));
                    }
                });
        dispatcher.execute(
                new EditingTranslationAction(
                        rowValue.getId(),
                        EditState.StopEditing),
                new AsyncCallback<EditingTranslationResult>() {
                    @Override
                    public void onFailure(Throwable caught) {
                        Log.error("EditingTranslationAction failure " + caught, caught);
                        eventBus.fireEvent(new NotificationEvent(Severity.Error, "Failed to Stop Editing TransUnit"));
                    }
                    @Override
                    public void onSuccess(EditingTranslationResult result) {
                    }
                });
        return true;
    }

    /** Called when an edit is cancelled; currently a no-op. */
    public void onCancel(TransUnit rowValue) {
        //stopEditing(rowValue);
    }

    /**
     * Moves the cursor to the row after the given page-relative row, crossing
     * page boundaries when necessary.
     */
    @Override
    public void gotoNextRow(int row) {
        curPage = display.getCurrentPage();
        curRowIndex = curPage * TableConstants.PAGE_SIZE + row;
        // NOTE(review): lastRowIndex is never assigned in this class as far as
        // visible here — confirm where it is set before relying on this check.
        if (curRowIndex < lastRowIndex)
            transIdNextNewCache.clear();
        int rowIndex = curPage * TableConstants.PAGE_SIZE + row + 1;
        if (rowIndex < display.getTableModel().getRowCount()) {
            // MAX_PAGE_ROW+1 is presumably the page size — TODO confirm.
            int pageNum = rowIndex / (MAX_PAGE_ROW + 1);
            int rowNum = rowIndex % (MAX_PAGE_ROW + 1);
            if (pageNum != curPage)
                display.gotoPage(pageNum, false);
            selectedTransUnit = display.getTransUnitValue(rowNum);
            display.gotoRow(rowNum);
        }
    }

    /**
     * Moves the cursor to the row before the given page-relative row, crossing
     * page boundaries when necessary.
     */
    @Override
    public void gotoPrevRow(int row) {
        curPage = display.getCurrentPage();
        int rowIndex = curPage * TableConstants.PAGE_SIZE + row - 1;
        if (rowIndex >= 0) {
            int pageNum = rowIndex / (MAX_PAGE_ROW + 1);
            int rowNum = rowIndex % (MAX_PAGE_ROW + 1);
            if (pageNum != curPage)
                display.gotoPage(pageNum, false);
            selectedTransUnit = display.getTransUnitValue(rowNum);
            display.gotoRow(rowNum);
        }
    }

    /** Jumps to the next row in the given state (Fuzzy/New), if any rows remain. */
    @Override
    public void nextFuzzyIndex(int row, ContentState state) {
        //Convert row number to row Index in table
        curPage = display.getCurrentPage();
        curRowIndex = curPage * TableConstants.PAGE_SIZE + row;
        Log.info("Current Row Index" + curRowIndex);
        if (curRowIndex < display.getTableModel().getRowCount())
            gotoNextState(state);
    }

    /** Jumps to the previous row in the given state (Fuzzy/New), if any exist before the cursor. */
    @Override
    public void prevFuzzyIndex(int row, ContentState state) {
        //Convert row number to row Index in table
        curPage = display.getCurrentPage();
        curRowIndex = curPage * TableConstants.PAGE_SIZE + row;
        Log.info("Current Row Index" + curRowIndex);
        if (curRowIndex > 0)
            gotoPrevState(state);
    }
};
/**
 * Notifies the server that this client has stopped editing the given unit.
 * Failures are logged and surfaced as an error notification.
 * NOTE(review): currently only referenced from commented-out code.
 */
private void stopEditing(TransUnit rowValue) {
    dispatcher.execute(
            new EditingTranslationAction(
                    rowValue.getId(),
                    EditState.StopEditing),
            new AsyncCallback<EditingTranslationResult>() {
                @Override
                public void onSuccess(EditingTranslationResult result) {
                    //eventBus.fireEvent(new NotificationEvent(Severity.Warning, "TransUnit Editing is finished"));
                }
                @Override
                public void onFailure(Throwable caught) {
                    Log.error("EditingTranslationAction failure " + caught, caught);
                    eventBus.fireEvent(new NotificationEvent(Severity.Error, "Failed to Stop Editing TransUnit"));
                }
            });
}

/**
 * Notifies the server that this client has started editing the given unit
 * (soft lock). Failures are logged and surfaced as an error notification.
 */
private void startEditing(TransUnit rowValue) {
    //Send a START_EDIT event
    dispatcher.execute(
            new EditingTranslationAction(
                    rowValue.getId(),
                    EditState.StartEditing),
            new AsyncCallback<EditingTranslationResult>() {
                @Override
                public void onFailure(Throwable caught) {
                    Log.error("EditingTranslationAction failure " + caught, caught);
                    eventBus.fireEvent(new NotificationEvent(Severity.Error, "Failed to Lock TransUnit"));
                }
                @Override
                public void onSuccess(EditingTranslationResult result) {
                }
            });
}
// True when no GetTransUnitsStates request is in flight; used to avoid
// issuing overlapping cache-fill requests.
// NOTE(review): package-private and mutated from async callbacks — consider
// making it private.
boolean isReqComplete = true;
// NOTE(review): read in gotoNextRow but never assigned in the visible code.
private int lastRowIndex;

/**
 * Asynchronously fills the "next" navigation cache with up to 3 row indices
 * of units in {@code desiredState} after {@code curRowIndex}, then invokes
 * {@code callBack.nextFuzzy} to continue the navigation.
 */
private void cacheNextFuzzy(final ContentState desiredState, final StatesCacheCallback callBack) {
    isReqComplete = false;
    dispatcher.execute(new GetTransUnitsStates(documentId, curRowIndex, 3, false, desiredState), new AsyncCallback<GetTransUnitsStatesResult>() {
        @Override
        public void onSuccess(GetTransUnitsStatesResult result) {
            isReqComplete = true;
            if (!result.getUnits().isEmpty()) {
                if (desiredState == ContentState.NeedReview)
                    transIdNextFuzzyCache = result.getUnits();
                if (desiredState == ContentState.New)
                    transIdNextNewCache = result.getUnits();
                callBack.nextFuzzy(desiredState);
            }
        }
        @Override
        public void onFailure(Throwable caught) {
            // Best-effort: navigation simply does not advance on failure.
            // NOTE(review): isReqComplete stays false here, blocking retries.
            Log.error("GetTransUnitsStates failure " + caught, caught);
        }
    });
}

/**
 * Asynchronously fills the "previous" navigation cache with up to 3 row
 * indices of units in {@code desiredState} before {@code curRowIndex}, then
 * invokes {@code callBack.prevFuzzy} to continue the navigation.
 */
private void cachePrevFuzzy(final ContentState desiredState, final StatesCacheCallback callBack) {
    isReqComplete = false;
    dispatcher.execute(new GetTransUnitsStates(documentId, curRowIndex, 3, true, desiredState), new AsyncCallback<GetTransUnitsStatesResult>() {
        @Override
        public void onSuccess(GetTransUnitsStatesResult result) {
            isReqComplete = true;
            if (!result.getUnits().isEmpty()) {
                if (desiredState == ContentState.NeedReview)
                    transIdPrevFuzzyCache = result.getUnits();
                if (desiredState == ContentState.New)
                    transIdPrevNewCache = result.getUnits();
                callBack.prevFuzzy(desiredState);
            }
        }
        @Override
        public void onFailure(Throwable caught) {
            // NOTE(review): isReqComplete stays false here, blocking retries.
            Log.error("GetTransUnitsStates failure " + caught, caught);
        }
    });
}
/**
 * Navigates the cursor to the previous row whose content state matches
 * {@code desiredState} (NeedReview a.k.a. Fuzzy, or New), using the cached
 * row indices and lazily refilling the cache from the server when empty or
 * stale.
 *
 * Fix: the NeedReview branch logged "Page of Next Fuzzy"/"Row Index of Next
 * Fuzzy" — copy-paste from gotoNextState; the labels now say "Prev".
 */
private void gotoPrevState(ContentState desiredState) {
    Log.info("Previous State: " + desiredState);
    if (desiredState == ContentState.NeedReview) {
        // Invalidate the forward cache, which is now stale relative to the cursor.
        transIdNextFuzzyCache.clear();
        // If the cache of previous fuzzy rows is empty and no request is
        // in flight, fill it (navigation continues via cacheCallback).
        if (transIdPrevFuzzyCache.isEmpty()) {
            if (isReqComplete)
                cachePrevFuzzy(desiredState, cacheCallback);
        } else {
            int size = transIdPrevFuzzyCache.size();
            int offset = transIdPrevFuzzyCache.get(size - 1).intValue();
            if (curRowIndex > offset) {
                // Find the first cached index strictly before the cursor.
                for (int i = 0; i < size; i++) {
                    int fuzzyRowIndex = transIdPrevFuzzyCache.get(i).intValue();
                    if (curRowIndex > fuzzyRowIndex) {
                        int pageNum = fuzzyRowIndex / (TableConstants.PAGE_SIZE);
                        int rowNum = fuzzyRowIndex % (TableConstants.PAGE_SIZE);
                        Log.info("Page of Prev Fuzzy " + pageNum);
                        Log.info("Row Index of Prev Fuzzy " + rowNum);
                        cancelEdit();
                        if (pageNum != curPage)
                            display.gotoPage(pageNum, false);
                        display.gotoRow(rowNum);
                        selectedTransUnit = display.getTransUnitValue(rowNum);
                        break;
                    }
                }
            } else {
                // Cursor moved past the cached window: refill.
                transIdPrevFuzzyCache.clear();
                cachePrevFuzzy(desiredState, cacheCallback);
            }
        }
    } else if (desiredState == ContentState.New) {
        // Invalidate the forward cache, which is now stale relative to the cursor.
        transIdNextNewCache.clear();
        // If the cache of previous new rows is empty and no request is
        // in flight, fill it (navigation continues via cacheCallback).
        if (transIdPrevNewCache.isEmpty()) {
            if (isReqComplete)
                cachePrevFuzzy(desiredState, cacheCallback);
        } else {
            int size = transIdPrevNewCache.size();
            int offset = transIdPrevNewCache.get(size - 1).intValue();
            if (curRowIndex > offset) {
                for (int i = 0; i < size; i++) {
                    int fuzzyRowIndex = transIdPrevNewCache.get(i).intValue();
                    if (curRowIndex > fuzzyRowIndex) {
                        int pageNum = fuzzyRowIndex / (TableConstants.PAGE_SIZE);
                        int rowNum = fuzzyRowIndex % (TableConstants.PAGE_SIZE);
                        Log.info("Page of Prev New " + pageNum);
                        Log.info("Row Index of Prev New " + rowNum);
                        cancelEdit();
                        if (pageNum != curPage)
                            display.gotoPage(pageNum, false);
                        display.gotoRow(rowNum);
                        selectedTransUnit = display.getTransUnitValue(rowNum);
                        break;
                    }
                }
            } else {
                // Cursor moved past the cached window: refill.
                transIdPrevNewCache.clear();
                cachePrevFuzzy(desiredState, cacheCallback);
            }
        }
    }
}
// Continuation invoked by cacheNextFuzzy/cachePrevFuzzy once the row-index
// cache has been (re)filled: re-enters the corresponding navigation method.
StatesCacheCallback cacheCallback = new StatesCacheCallback() {
    @Override
    public void nextFuzzy(ContentState state) {
        gotoNextState(state);
    }
    @Override
    public void prevFuzzy(ContentState state) {
        gotoPrevState(state);
    }
};
/**
 * Navigates the cursor to the next row whose content state matches
 * {@code desiredState} (NeedReview a.k.a. Fuzzy, or New), using the cached
 * row indices and lazily refilling the cache from the server when empty or
 * stale. Mirror image of gotoPrevState.
 */
private void gotoNextState(ContentState desiredState) {
    Log.info("Next State: " + desiredState);
    if (desiredState == ContentState.NeedReview) {
        // Invalidate the backward cache, which is now stale relative to the cursor.
        transIdPrevFuzzyCache.clear();
        //If the cache of next fuzzy is empty, generate one
        if (transIdNextFuzzyCache.isEmpty()) {
            if (isReqComplete)
                cacheNextFuzzy(desiredState, cacheCallback);
        } else {
            int size = transIdNextFuzzyCache.size();
            int offset = transIdNextFuzzyCache.get(size - 1).intValue();
            if (curRowIndex < offset) {
                // Find the first cached index strictly after the cursor.
                for (int i = 0; i < size; i++) {
                    int fuzzyRowIndex = transIdNextFuzzyCache.get(i).intValue();
                    if (curRowIndex < fuzzyRowIndex) {
                        int pageNum = fuzzyRowIndex / (TableConstants.PAGE_SIZE);
                        int rowNum = fuzzyRowIndex % (TableConstants.PAGE_SIZE);
                        Log.info("Page of Next Fuzzy " + pageNum);
                        Log.info("Row Index of Next Fuzzy" + rowNum);
                        cancelEdit();
                        if (pageNum != curPage)
                            display.gotoPage(pageNum, false);
                        display.gotoRow(rowNum);
                        selectedTransUnit = display.getTransUnitValue(rowNum);
                        break;
                    }
                }
            } else {
                // Cursor moved past the cached window: refill.
                transIdNextFuzzyCache.clear();
                cacheNextFuzzy(desiredState, cacheCallback);
            }
        }
    } else if (desiredState == ContentState.New) {
        // Invalidate the backward cache, which is now stale relative to the cursor.
        transIdPrevNewCache.clear();
        //If the cache of next new is empty, generate one
        if (transIdNextNewCache.isEmpty()) {
            if (isReqComplete)
                cacheNextFuzzy(desiredState, cacheCallback);
        } else {
            int size = transIdNextNewCache.size();
            int offset = transIdNextNewCache.get(size - 1).intValue();
            if (curRowIndex < offset) {
                for (int i = 0; i < size; i++) {
                    int fuzzyRowIndex = transIdNextNewCache.get(i).intValue();
                    if (curRowIndex < fuzzyRowIndex) {
                        int pageNum = fuzzyRowIndex / (TableConstants.PAGE_SIZE);
                        int rowNum = fuzzyRowIndex % (TableConstants.PAGE_SIZE);
                        Log.info("Page of Next New " + pageNum);
                        Log.info("Row Index of Next New" + rowNum);
                        cancelEdit();
                        if (pageNum != curPage)
                            display.gotoPage(pageNum, false);
                        display.gotoRow(rowNum);
                        selectedTransUnit = display.getTransUnitValue(rowNum);
                        break;
                    }
                }
            } else {
                // Cursor moved past the cached window: refill.
                transIdNextNewCache.clear();
                cacheNextFuzzy(desiredState, cacheCallback);
            }
        }
    }
}
/** Returns the currently selected TransUnit, or null when nothing is selected. */
public TransUnit getSelectedTransUnit() {
    return selectedTransUnit;
}

/** No place-request parameters are used by this presenter. */
@Override
protected void onPlaceRequest(PlaceRequest request) {
}

/** Nothing to clean up beyond handlers removed by the superclass. */
@Override
protected void onUnbind() {
}

@Override
public void refreshDisplay() {
}

@Override
public void revealDisplay() {
}

// HasPageNavigation: all paging calls delegate straight to the view.
@Override
public void gotoFirstPage() {
    display.gotoFirstPage();
}

@Override
public void gotoLastPage() {
    display.gotoLastPage();
}

@Override
public void gotoNextPage() {
    display.gotoNextPage();
}

@Override
public void gotoPage(int page, boolean forced) {
    display.gotoPage(page, forced);
}

@Override
public void gotoPreviousPage() {
    display.gotoPreviousPage();
}

// Handler registration delegates to the view's handler collections.
@Override
public HandlerRegistration addPageChangeHandler(PageChangeHandler handler) {
    return display.getPageChangeHandlers().addPageChangeHandler(handler);
}

@Override
public HandlerRegistration addPageCountChangeHandler(
        PageCountChangeHandler handler) {
    return display.getPageCountChangeHandlers().addPageCountChangeHandler(handler);
}

/** Returns the id of the document currently shown, or null before selection. */
public DocumentId getDocumentId() {
    return documentId;
}

/** Cancels any in-progress inline edit in the target cell editor. */
public void cancelEdit() {
    display.getTargetCellEditor().cancelEdit();
}
} |
package org.inaetics.dronessimulator.common.vector;
import org.inaetics.dronessimulator.common.Tuple;
import java.io.Serializable;
import java.math.BigDecimal;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Three-dimensional polar coordinate.
*/
public class D3PolarCoordinate implements Serializable {
/** The unity coordinate. */
public static final D3PolarCoordinate UNIT = new D3PolarCoordinate(0,0, 1);
/** The angle between the x and y axis. */
private final double angle1_x_y; // Between 0 and 2pi
/** The angle between the x and z axis. */
private final double angle2_x_z; // Between -0.5 * pi and 0.5 * pi
/** The distance to the coordinate. */
private final double length;
/**
* Instantiates a new three-dimensional unity polar coordinate.
*/
public D3PolarCoordinate() {
this(0,0,1);
}
/**
* Instantiates a new three-dimensional polar coordinate with the given coordinates.
* @param angle1_x_y_ The angle between the x and y axis.
* @param angle2_x_z_ The angle between the x and z axis.
* @param length_ The distance to the coordinate.
*/
public D3PolarCoordinate(double angle1_x_y_, double angle2_x_z_, double length_) {
double angle1_x_y = angle1_x_y_;
double angle2_x_z = angle2_x_z_;
double length = length_;
// Change angles to keep the length always positive.
if(length < 0) {
angle1_x_y = angle1_x_y + Math.PI;
angle2_x_z = -1 * angle2_x_z;
length = -1 * length;
}
// Normalize the angles.
Tuple<Double, Double> normalizedAngles = normalizeAngles(angle1_x_y, angle2_x_z);
this.angle1_x_y = normalizedAngles.getLeft();
this.angle2_x_z = normalizedAngles.getRight();
this.length = length;
}
/**
* Normalizes the given angles such that the first angle is between 0 and 2pi and the second angle is between -0.5pi
* and 0.5pi.
* @param angle1_x_y The angle between the x and y axis.
* @param angle2_x_z The angle between the x and z axis.
* @return A tuple containing the normalized angles in the order they were given.
*/
private static Tuple<Double, Double> normalizeAngles(double angle1_x_y, double angle2_x_z) {
double a1_ = angle1_x_y;
double a2_ = angle2_x_z % (2 * Math.PI);
if(angle2_x_z > 0.5 * Math.PI) {
if(angle2_x_z <= Math.PI) {
// 0.5 PI < angle2 <= PI
// Mirror a1_
// a2_ starts at the other side now
a2_ = Math.PI - a2_;
a1_ = a1_ + Math.PI;
} else if(angle2_x_z <= 1.5 * Math.PI) {
// PI < angle2 <= 1.5 PI
//Mirror a1_
// a2_ starts at the other side now
a2_ = -1 * (a2_ - Math.PI);
a1_ = a1_ + Math.PI;
} else {
// 1.5 PI < angle2 < 2 PI
a2_ = a2_ - 2 * Math.PI;
}
} else if(angle2_x_z < -0.5 * Math.PI) {
if(angle2_x_z >= -1 * Math.PI) {
// -0.5 PI < angle2 <= -PI
// Mirror a1_
// a2_ starts at the other side now
a2_ = -1 * Math.PI - a2_;
a1_ = a1_ + Math.PI;
} else if(angle2_x_z >= -1.5 * Math.PI) {
// -PI < angle2 <= -1.5 PI
//Mirror a1_
// a2_ starts at the other side now
a2_ = Math.abs(a2_) - Math.PI;
a1_ = a1_ + Math.PI;
} else {
// -1.5 PI < angle2 <-2 PI
a2_ = 2 * Math.PI + a2_;
}
}
if(a1_ < 0) {
a1_ = 2 * Math.PI + (a1_ % (2 * Math.PI));
} else {
a1_ = a1_ % (2 * Math.PI);
}
return new Tuple<>(a1_, a2_);
}
/**
* Returns the angle between the x and y axis in radians.
* @return The angle between the x and y axis.
*/
public double getAngle1() {
return angle1_x_y;
}
/**
* Returns the angle between the x and y axis in degrees.
* @return The angle between the x and y axis.
*/
public double getAngle1Degrees() {
return radianToDegrees(this.angle1_x_y);
}
/**
* Returns the angle between the x and z axis in radians.
* @return The angle between the x and z axis.
*/
public double getAngle2() {
return angle2_x_z;
}
/**
* Returns the angle between the x and z axis in degrees.
* @return The angle between the x and z axis.
*/
public double getAngle2Degrees() {
return radianToDegrees(this.angle2_x_z);
}
/**
* Produces a new coordinate which is rotated with the given angles.
* @param angle1_x_y The relative rotation between the x and y axis.
* @param angle2_x_z The relative rotation between the x and z axis.
* @return The produced coordinate.
*/
public D3PolarCoordinate rotate(double angle1_x_y, double angle2_x_z) {
return new D3PolarCoordinate(this.angle1_x_y + angle1_x_y, this.angle2_x_z + angle2_x_z, this.length);
}
/**
* Produces a new coordinate which is moved (in distance) with the given factor.
* @param scalar The scalar for the distance.
* @return The produced coordinate.
*/
public D3PolarCoordinate scale(double scalar) {
return new D3PolarCoordinate(this.angle1_x_y, this.angle2_x_z, this.length * scalar);
}
/**
* Returns the distance of this coordinate.
* @return The distance of this coordinate.
*/
public double getLength() {
return length;
}
/**
* Converts this polar coordinate to a vector.
* @return The resulting vector.
*/
public D3Vector toVector() {
double xy_length = Math.cos(this.angle2_x_z) * this.length;
double x_length = Math.cos(this.angle1_x_y) * xy_length;
double y_length = Math.sin(this.angle1_x_y) * xy_length;
double z_length = Math.sin(this.angle2_x_z) * this.length;
return new D3Vector(x_length, y_length, z_length);
}
/**
* Returns the string representation of this coordinate.
* @return The string representation.
*/
public String toString() {
return "(a1:" + this.angle1_x_y + ", a2: " + this.angle2_x_z + " l:" + this.length + ")";
}
public static D3PolarCoordinate fromString(String str){
Pattern pattern = Pattern.compile("\\(a1:(-?[0-9.]*), a2:(-?[0-9.]*), l:(-?[0-9.]*)\\)");
Matcher matcher = pattern.matcher(str);
if (matcher.matches()) {
return new D3PolarCoordinate(Double.parseDouble(matcher.group(1)), Double.parseDouble(matcher.group(2)), Double.parseDouble(matcher.group(3)));
}
return null;
}
/**
* Tests whether the given object is equal to this coordinate.
* @param o The object to test.
* @return Whether the given object is equal to this coordinate.
*/
@Override
public boolean equals(Object o) {
if(o instanceof D3PolarCoordinate) {
D3PolarCoordinate other = (D3PolarCoordinate) o;
return BigDecimal.valueOf(this.getAngle1()).compareTo(BigDecimal.valueOf(other.getAngle1())) == 0
&& BigDecimal.valueOf(this.getAngle2()).compareTo(BigDecimal.valueOf(other.getAngle2())) == 0
&& BigDecimal.valueOf(this.getLength()).compareTo(BigDecimal.valueOf(other.getLength())) == 0;
} else {
return false;
}
}
@Override
public int hashCode() {
return (int) Math.round(this.getAngle1() + this.getAngle2() + this.getLength());
}
/**
* Converts the given angle in radians to degrees.
* @param radians The angle in radians.
* @return The given angle in degrees.
*/
public static double radianToDegrees(double radians) {
return (radians / Math.PI) * 180;
}
/**
* Converts the given angle in degrees to radians.
* @param degrees The angle in degrees.
* @return The given angle in radians.
*/
public static double degreesToRadian(double degrees) {
return (degrees / 180) * Math.PI;
}
} |
package org.opennms.netmgt.provision;
import java.util.Date;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import org.apache.log4j.Category;
import org.opennms.core.utils.ThreadCategory;
import org.opennms.netmgt.EventConstants;
import org.opennms.netmgt.config.RWSConfig;
import org.opennms.netmgt.config.RancidAdapterConfig;
import org.opennms.netmgt.dao.NodeDao;
import org.opennms.netmgt.model.OnmsAssetRecord;
import org.opennms.netmgt.model.OnmsNode;
import org.opennms.netmgt.model.events.EventBuilder;
import org.opennms.netmgt.model.events.EventForwarder;
import org.opennms.rancid.ConnectionProperties;
import org.opennms.rancid.RWSClientApi;
import org.opennms.rancid.RancidNode;
import org.opennms.rancid.RancidNodeAuthentication;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.util.Assert;
/**
 * A Rancid provisioning adapter for integration with OpenNMS Provisioning daemon API.
*
* @author <a href="mailto:guglielmoincisa@gmail.com">Guglielmo Incisa</a>
* @author <a href="mailto:antonio@opennms.it">Antonio Russo</a>
*
*/
public class RancidProvisioningAdapter implements ProvisioningAdapter, InitializingBean {
private NodeDao m_nodeDao;
private EventForwarder m_eventForwarder;
private RWSConfig m_rwsConfig;
private RancidAdapterConfig m_rancidAdapterConfig;
private ConnectionProperties m_cp;
private static final String MESSAGE_PREFIX = "Rancid provisioning failed: ";
private static final String ADAPTER_NAME="RANCID Provisioning Adapter";
private static final String RANCID_COMMENT="node provisioned by opennms";
private volatile static ConcurrentMap<Integer, RancidNodeContainer> m_onmsNodeRancidNodeMap;
public void afterPropertiesSet() throws Exception {
//FIXME this should be done by spring
RWSClientApi.init();
m_cp = new ConnectionProperties(m_rwsConfig.getBaseUrl().getServer_url(),m_rwsConfig.getBaseUrl().getDirectory(),m_rwsConfig.getBaseUrl().getTimeout());
log().debug("Connections used :" +m_rwsConfig.getBaseUrl().getServer_url()+m_rwsConfig.getBaseUrl().getDirectory());
log().debug("timeout: "+m_rwsConfig.getBaseUrl().getTimeout());
Assert.notNull(m_nodeDao, "Rancid Provisioning Adapter requires nodeDao property to be set.");
List<OnmsNode> nodes = m_nodeDao.findAllProvisionedNodes();
m_onmsNodeRancidNodeMap = new ConcurrentHashMap<Integer, RancidNodeContainer>(nodes.size());
for (OnmsNode onmsNode : nodes) {
RancidNode rNode = getSuitableRancidNode(onmsNode);
RancidNodeAuthentication rAuth = getSuitableRancidNodeAuthentication(onmsNode);
m_onmsNodeRancidNodeMap.putIfAbsent(onmsNode.getId(), new RancidNodeContainer(rNode, rAuth));
}
}
private class RancidNodeContainer {
private RancidNode m_node;
private RancidNodeAuthentication m_auth;
public RancidNodeContainer(RancidNode node, RancidNodeAuthentication auth) {
setNode(node);
setAuth(auth);
}
public void setNode(RancidNode node) {
m_node = node;
}
public RancidNode getNode() {
return m_node;
}
public void setAuth(RancidNodeAuthentication auth) {
m_auth = auth;
}
public RancidNodeAuthentication getAuth() {
return m_auth;
}
}
@Transactional
public void addNode(int nodeId) throws ProvisioningAdapterException {
log().debug("RANCID PROVISIONING ADAPTER CALLED addNode");
try {
OnmsNode node = m_nodeDao.get(nodeId);
Assert.notNull(node, "Rancid Provisioning Adapter addNode method failed to return node for given nodeId:"+nodeId);
RancidNode rNode = getSuitableRancidNode(node);
RWSClientApi.createRWSRancidNode(m_cp, rNode);
RancidNodeAuthentication rAuth = getSuitableRancidNodeAuthentication(node);
RWSClientApi.createOrUpdateRWSAuthNode(m_cp, rAuth);
m_onmsNodeRancidNodeMap.put(Integer.valueOf(nodeId), new RancidNodeContainer(rNode, rAuth));
} catch (Exception e) {
sendAndThrow(nodeId, e);
}
}
@Transactional
public void updateNode(int nodeId) throws ProvisioningAdapterException {
log().debug("RANCID PROVISIONING ADAPTER CALLED updateNode");
try {
OnmsNode node = m_nodeDao.get(nodeId);
RancidNode rNode = getSuitableRancidNode(node);
RWSClientApi.createOrUpdateRWSRancidNode(m_cp, rNode);
RancidNodeAuthentication rAuth = getSuitableRancidNodeAuthentication(node);
RWSClientApi.createOrUpdateRWSAuthNode(m_cp, getSuitableRancidNodeAuthentication(node));
m_onmsNodeRancidNodeMap.replace(node.getId(), new RancidNodeContainer(rNode, rAuth));
} catch (Exception e) {
sendAndThrow(nodeId, e);
}
}
@Transactional
public void deleteNode(int nodeId) throws ProvisioningAdapterException {
log().debug("RANCID PROVISIONING ADAPTER CALLED deleteNode");
/*
* The work to maintain the hashmap boils down to needing to do deletes, so
* here we go.
*/
try {
RancidNode rNode = m_onmsNodeRancidNodeMap.get(Integer.valueOf(nodeId)).getNode();
RWSClientApi.deleteRWSRancidNode(m_cp, rNode);
RancidNodeAuthentication rAuth = m_onmsNodeRancidNodeMap.get(Integer.valueOf(nodeId)).getAuth();
RWSClientApi.deleteRWSAuthNode(m_cp, rAuth);
m_onmsNodeRancidNodeMap.remove(Integer.valueOf(nodeId));
} catch (Exception e) {
sendAndThrow(nodeId, e);
}
}
public void nodeConfigChanged(int nodeid) throws ProvisioningAdapterException {
throw new ProvisioningAdapterException("configChanged event not yet implemented.");
}
private void sendAndThrow(int nodeId, Exception e) {
log().debug("RANCID PROVISIONING ADAPTER CALLED sendAndThrow");
m_eventForwarder.sendNow(buildEvent(EventConstants.PROVISIONING_ADAPTER_FAILED, nodeId).addParam("reason", MESSAGE_PREFIX+e.getLocalizedMessage()).getEvent());
throw new ProvisioningAdapterException(MESSAGE_PREFIX, e);
}
private EventBuilder buildEvent(String uei, int nodeId) {
log().debug("RANCID PROVISIONING ADAPTER CALLED EventBuilder");
EventBuilder builder = new EventBuilder(uei, "Provisioner", new Date());
builder.setNodeid(nodeId);
return builder;
}
public NodeDao getNodeDao() {
return m_nodeDao;
}
public void setNodeDao(NodeDao dao) {
m_nodeDao = dao;
}
public void setEventForwarder(EventForwarder eventForwarder) {
m_eventForwarder = eventForwarder;
}
public EventForwarder getEventForwarder() {
return m_eventForwarder;
}
private static Category log() {
return ThreadCategory.getInstance(RancidProvisioningAdapter.class);
}
public RWSConfig getRwsConfig() {
return m_rwsConfig;
}
public void setRwsConfig(RWSConfig rwsConfig) {
m_rwsConfig = rwsConfig;
}
public RancidAdapterConfig getRancidAdapterConfig() {
return m_rancidAdapterConfig;
}
public void setRancidAdapterConfig(RancidAdapterConfig rancidAdapterConfig) {
m_rancidAdapterConfig = rancidAdapterConfig;
}
public String getName() {
return ADAPTER_NAME;
}
private RancidNode getSuitableRancidNode(OnmsNode node) {
//FIXME: Guglielmo, the group should be the foreign source of the node
// Antonio: I'm working on the configuration file and the group
// is written in the configuration file
// in principle you can provide rancid node to more then a group
// String group = node.getForeignSource();
// RancidNode r_node = new RancidNode(m_rancidAdapterConfig.getGroup(), node.getLabel());
String group = m_rancidAdapterConfig.getGroup();
RancidNode r_node = new RancidNode(group, node.getLabel());
//FIXME: Guglielmo, the device type is going to have to be mapped by SysObjectId...
//that should probably be in the RancidNode class
// It is in the Configuration file for Rancid ADapter
r_node.setDeviceType(RancidNode.DEVICE_TYPE_CISCO_IOS);
r_node.setStateUp(false);
r_node.setComment(RANCID_COMMENT);
return r_node;
}
private RancidNodeAuthentication getSuitableRancidNodeAuthentication(OnmsNode node) {
// RancidAutentication
RancidNodeAuthentication r_auth_node = new RancidNodeAuthentication();
r_auth_node.setDeviceName(node.getLabel());
OnmsAssetRecord asset_node = node.getAssetRecord();
if (asset_node.getUsername() != null) {
r_auth_node.setUser(asset_node.getUsername());
}
if (asset_node.getPassword() != null) {
r_auth_node.setPassword(asset_node.getPassword());
}
if (asset_node.getEnable() != null) {
r_auth_node.setEnablePass(asset_node.getEnable());
}
if (asset_node.getAutoenable() != null) {
r_auth_node.setAutoEnable(asset_node.getAutoenable().equals(OnmsAssetRecord.AUTOENABLED));
}
if (asset_node.getConnection() != null) {
r_auth_node.setConnectionMethod(asset_node.getUsername());
} else {
r_auth_node.setConnectionMethod(m_rancidAdapterConfig.getDefaultConnectionType());
}
return r_auth_node;
}
} |
package org.opennms.netmgt.provision;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.TimeUnit;
import org.apache.log4j.Category;
import org.opennms.core.utils.ThreadCategory;
import org.opennms.netmgt.EventConstants;
import org.opennms.netmgt.config.RWSConfig;
import org.opennms.netmgt.config.RancidAdapterConfig;
import org.opennms.netmgt.dao.NodeDao;
import org.opennms.netmgt.model.OnmsAssetRecord;
import org.opennms.netmgt.model.OnmsIpInterface;
import org.opennms.netmgt.model.OnmsNode;
import org.opennms.netmgt.model.events.EventBuilder;
import org.opennms.netmgt.model.events.EventForwarder;
import org.opennms.netmgt.model.events.EventSubscriptionService;
import org.opennms.netmgt.model.events.annotations.EventHandler;
import org.opennms.netmgt.model.events.annotations.EventListener;
import org.opennms.netmgt.xml.event.Event;
import org.opennms.rancid.ConnectionProperties;
import org.opennms.rancid.RWSClientApi;
import org.opennms.rancid.RancidNode;
import org.opennms.rancid.RancidNodeAuthentication;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.util.Assert;
/**
* A Rancid provisioning adapter for integration with OpenNMS Provisoning daemon API.
*
* @author <a href="mailto:guglielmoincisa@gmail.com">Guglielmo Incisa</a>
* @author <a href="mailto:antonio@opennms.it">Antonio Russo</a>
*
*/
@EventListener(name="RancidProvisioningAdapter:Listener")
public class RancidProvisioningAdapter extends SimpleQueuedProvisioningAdapter implements InitializingBean, org.opennms.netmgt.model.events.EventListener {
private NodeDao m_nodeDao;
private volatile EventForwarder m_eventForwarder;
private volatile EventSubscriptionService m_eventSubscriptionService;
private RWSConfig m_rwsConfig;
private RancidAdapterConfig m_rancidAdapterConfig;
private ConnectionProperties m_cp;
private static final String MESSAGE_PREFIX = "Rancid provisioning failed: ";
private static final String ADAPTER_NAME="RancidProvisioningAdapter";
private static final String RANCID_COMMENT="node provisioned by opennms";
public static final String NAME = "RancidProvisioningAdapter";
private volatile static ConcurrentMap<Integer, RancidNodeContainer> m_onmsNodeRancidNodeMap;
@Override
AdapterOperationSchedule createScheduleForNode(int nodeId, AdapterOperationType type) {
if (type.equals(AdapterOperationType.CONFIG_CHANGE)) {
String ipaddress = getSuitableIpForRancid(nodeId);
return new AdapterOperationSchedule(m_rancidAdapterConfig.getDelay(ipaddress),60000, m_rancidAdapterConfig.getRetries(ipaddress), TimeUnit.MILLISECONDS);
}
return new AdapterOperationSchedule();
}
public void afterPropertiesSet() throws Exception {
RWSClientApi.init();
Assert.notNull(m_rwsConfig, "Rancid Provisioning Adapter requires RWSConfig property to be set.");
m_cp = getRWSConnection();
Assert.notNull(m_nodeDao, "Rancid Provisioning Adapter requires nodeDao property to be set.");
List<OnmsNode> nodes = m_nodeDao.findAllProvisionedNodes();
m_onmsNodeRancidNodeMap = new ConcurrentHashMap<Integer, RancidNodeContainer>(nodes.size());
createMessageSelectorAndSubscribe();
for (OnmsNode onmsNode : nodes) {
RancidNode rNode = getSuitableRancidNode(onmsNode);
if (rNode != null) {
RancidNodeAuthentication rAuth = getSuitableRancidNodeAuthentication(onmsNode);
m_onmsNodeRancidNodeMap.putIfAbsent(onmsNode.getId(), new RancidNodeContainer(rNode, rAuth));
}
}
}
private ConnectionProperties getRWSConnection() {
log().debug("Connections used : " +m_rwsConfig.getBaseUrl().getServer_url()+m_rwsConfig.getBaseUrl().getDirectory());
log().debug("RWS timeout(sec): "+m_rwsConfig.getBaseUrl().getTimeout());
return new ConnectionProperties(m_rwsConfig.getBaseUrl().getServer_url(),m_rwsConfig.getBaseUrl().getDirectory(),m_rwsConfig.getBaseUrl().getTimeout());
}
private class RancidNodeContainer {
private RancidNode m_node;
private RancidNodeAuthentication m_auth;
public RancidNodeContainer(RancidNode node, RancidNodeAuthentication auth) {
setNode(node);
setAuth(auth);
}
public void setNode(RancidNode node) {
m_node = node;
}
public RancidNode getNode() {
return m_node;
}
public void setAuth(RancidNodeAuthentication auth) {
m_auth = auth;
}
public RancidNodeAuthentication getAuth() {
return m_auth;
}
}
@Transactional
public void doAdd(int nodeId) throws ProvisioningAdapterException {
log().debug("RANCID PROVISIONING ADAPTER CALLED addNode");
try {
OnmsNode node = m_nodeDao.get(nodeId);
Assert.notNull(node, "Rancid Provisioning Adapter addNode method failed to return node for given nodeId:"+nodeId);
RancidNode rNode = getSuitableRancidNode(node);
rNode.setStateUp(true);
RWSClientApi.createRWSRancidNode(m_cp, rNode);
RancidNodeAuthentication rAuth = getSuitableRancidNodeAuthentication(node);
RWSClientApi.createOrUpdateRWSAuthNode(m_cp, rAuth);
m_onmsNodeRancidNodeMap.put(Integer.valueOf(nodeId), new RancidNodeContainer(rNode, rAuth));
} catch (Exception e) {
sendAndThrow(nodeId, e);
}
}
@Transactional
public void doUpdate(int nodeId) throws ProvisioningAdapterException {
log().debug("RANCID PROVISIONING ADAPTER CALLED updateNode");
try {
OnmsNode node = m_nodeDao.get(nodeId);
RancidNode rNode = getSuitableRancidNode(node);
RWSClientApi.createOrUpdateRWSRancidNode(m_cp, rNode);
RancidNodeAuthentication rAuth = getSuitableRancidNodeAuthentication(node);
RWSClientApi.createOrUpdateRWSAuthNode(m_cp, getSuitableRancidNodeAuthentication(node));
m_onmsNodeRancidNodeMap.replace(node.getId(), new RancidNodeContainer(rNode, rAuth));
} catch (Exception e) {
sendAndThrow(nodeId, e);
}
}
@Transactional
public void doDelete(int nodeId) throws ProvisioningAdapterException {
log().debug("RANCID PROVISIONING ADAPTER CALLED deleteNode");
/*
* The work to maintain the hashmap boils down to needing to do deletes, so
* here we go.
*/
try {
RancidNode rNode = m_onmsNodeRancidNodeMap.get(Integer.valueOf(nodeId)).getNode();
RWSClientApi.deleteRWSRancidNode(m_cp, rNode);
RancidNodeAuthentication rAuth = m_onmsNodeRancidNodeMap.get(Integer.valueOf(nodeId)).getAuth();
RWSClientApi.deleteRWSAuthNode(m_cp, rAuth);
m_onmsNodeRancidNodeMap.remove(Integer.valueOf(nodeId));
} catch (Exception e) {
sendAndThrow(nodeId, e);
}
}
public void doNodeConfigChanged(int nodeId) throws ProvisioningAdapterException {
log().debug("RANCID PROVISIONING ADAPTER CALLED updateNode");
try {
if (m_onmsNodeRancidNodeMap.containsKey(Integer.valueOf(nodeId))) {
RancidNode rNode = m_onmsNodeRancidNodeMap.get(Integer.valueOf(nodeId)).getNode();
RWSClientApi.updateRWSRancidNode(m_cp, rNode);
} else {
throw new Exception("No node found for nodeid: " + nodeId);
}
} catch (Exception e) {
sendAndThrow(nodeId, e);
}
}
private void sendAndThrow(int nodeId, Exception e) {
log().debug("RANCID PROVISIONING ADAPTER CALLED sendAndThrow");
Event event = buildEvent(EventConstants.PROVISIONING_ADAPTER_FAILED, nodeId).addParam("reason", MESSAGE_PREFIX+e.getLocalizedMessage()).getEvent();
m_eventForwarder.sendNow(event);
throw new ProvisioningAdapterException(MESSAGE_PREFIX, e);
}
private EventBuilder buildEvent(String uei, int nodeId) {
log().debug("RANCID PROVISIONING ADAPTER CALLED EventBuilder");
EventBuilder builder = new EventBuilder(uei, "Provisioner", new Date());
builder.setNodeid(nodeId);
return builder;
}
public NodeDao getNodeDao() {
return m_nodeDao;
}
public void setNodeDao(NodeDao dao) {
m_nodeDao = dao;
}
public void setEventForwarder(EventForwarder eventForwarder) {
m_eventForwarder = eventForwarder;
}
public EventForwarder getEventForwarder() {
return m_eventForwarder;
}
private static Category log() {
return ThreadCategory.getInstance(RancidProvisioningAdapter.class);
}
public RWSConfig getRwsConfig() {
return m_rwsConfig;
}
public void setRwsConfig(RWSConfig rwsConfig) {
m_rwsConfig = rwsConfig;
}
public RancidAdapterConfig getRancidAdapterConfig() {
return m_rancidAdapterConfig;
}
public void setRancidAdapterConfig(RancidAdapterConfig rancidAdapterConfig) {
m_rancidAdapterConfig = rancidAdapterConfig;
}
public String getName() {
return ADAPTER_NAME;
}
private String getSuitableIpForRancid(int nodeid){
OnmsNode node = m_nodeDao.get(nodeid);
OnmsIpInterface primaryInterface = node.getPrimaryInterface();
if (primaryInterface == null) {
Set<OnmsIpInterface> ipInterfaces = node.getIpInterfaces();
for (OnmsIpInterface onmsIpInterface : ipInterfaces) {
return onmsIpInterface.getIpAddress();
}
}
return primaryInterface.getIpAddress();
}
private RancidNode getSuitableRancidNode(OnmsNode node) {
//The group should be the foreign source of the node
String group = node.getForeignSource();
if (group == null) return null;
RancidNode r_node = new RancidNode(group, node.getLabel());
//FIXME: Check the node categories if useNodecategories is true
r_node.setDeviceType(m_rancidAdapterConfig.getType(node.getSysObjectId()));
r_node.setStateUp(false);
r_node.setComment(RANCID_COMMENT);
return r_node;
}
private RancidNodeAuthentication getSuitableRancidNodeAuthentication(OnmsNode node) {
// RancidAutentication
RancidNodeAuthentication r_auth_node = new RancidNodeAuthentication();
r_auth_node.setDeviceName(node.getLabel());
OnmsAssetRecord asset_node = node.getAssetRecord();
if (asset_node.getUsername() != null) {
r_auth_node.setUser(asset_node.getUsername());
}
if (asset_node.getPassword() != null) {
r_auth_node.setPassword(asset_node.getPassword());
}
if (asset_node.getEnable() != null) {
r_auth_node.setEnablePass(asset_node.getEnable());
}
if (asset_node.getAutoenable() != null) {
r_auth_node.setAutoEnable(asset_node.getAutoenable().equals(OnmsAssetRecord.AUTOENABLED));
}
if (asset_node.getConnection() != null) {
r_auth_node.setConnectionMethod(asset_node.getConnection());
} else {
r_auth_node.setConnectionMethod("telnet");
}
return r_auth_node;
}
@Override
public boolean isNodeReady(AdapterOperation op) {
if (op.getType() == AdapterOperationType.CONFIG_CHANGE) {
Integer nodeid = op.getNodeId();
updateRancidNodeState(nodeid, true);
if ( m_rancidAdapterConfig.isCurTimeInSchedule(getSuitableIpForRancid(nodeid))) {
return true;
} else {
return false;
}
}
return true;
}
@Override
public void processPendingOperationForNode(AdapterOperation op) throws ProvisioningAdapterException {
if (op.getType() == AdapterOperationType.ADD) {
doAdd(op.getNodeId());
} else if (op.getType() == AdapterOperationType.UPDATE) {
doUpdate(op.getNodeId());
} else if (op.getType() == AdapterOperationType.DELETE) {
doDelete(op.getNodeId());
} else if (op.getType() == AdapterOperationType.CONFIG_CHANGE) {
doNodeConfigChanged(op.getNodeId());
}
}
@EventHandler(uei = EventConstants.RANCID_DOWNLOAD_FAILURE_UEI)
public void handleRancidDownLoadFailure(Event e) {
log().debug("get Event uei/id: " + e.getUei() + "/" + e.getDbid());
if (e.hasNodeid()) {
int nodeId = Long.valueOf(e.getNodeid()).intValue();
if (m_onmsNodeRancidNodeMap.containsKey(Integer.valueOf(nodeId))) {
updateRancidNodeState(nodeId, true);
doNodeConfigChanged(nodeId);
} else {
log().warn("node does not exist with nodeid: " + e.getNodeid());
}
}
}
@EventHandler(uei = EventConstants.RANCID_DOWNLOAD_SUCCESS_UEI)
public void handleRancidDownLoadSuccess(Event e) {
log().debug("get Event uei/id: " + e.getUei() + "/" + e.getDbid());
if (e.hasNodeid() ) {
int nodeId = Long.valueOf(e.getNodeid()).intValue();
if (m_onmsNodeRancidNodeMap.containsKey(Integer.valueOf(nodeId))) {
updateRancidNodeState(nodeId, false);
doNodeConfigChanged(nodeId);
} else {
log().warn("node does not exist with nodeid: " + e.getNodeid());
}
}
}
private void updateRancidNodeState(int nodeid, boolean up) {
RancidNodeContainer rcont = m_onmsNodeRancidNodeMap.get(Integer.valueOf(nodeid));
RancidNode rnode = rcont.getNode();
rnode.setStateUp(up);
rcont.setNode(rnode);
m_onmsNodeRancidNodeMap.put(nodeid, rcont);
}
public EventSubscriptionService getEventSubscriptionService() {
return m_eventSubscriptionService;
}
public void setEventSubscriptionService(
EventSubscriptionService eventSubscriptionService) {
m_eventSubscriptionService = eventSubscriptionService;
}
public void onEvent(Event e) {
if (e == null)
return;
if (e.getUei().equals(EventConstants.RANCID_DOWNLOAD_FAILURE_UEI))
handleRancidDownLoadFailure(e);
else if (e.getUei().equals(EventConstants.RANCID_DOWNLOAD_SUCCESS_UEI))
handleRancidDownLoadSuccess(e);
}
private void createMessageSelectorAndSubscribe() {
List<String> ueiList = new ArrayList<String>();
ueiList.add(EventConstants.RANCID_DOWNLOAD_FAILURE_UEI);
ueiList.add(EventConstants.RANCID_DOWNLOAD_SUCCESS_UEI);
getEventSubscriptionService().addEventListener(this, ueiList);
}
} |
package org.languagetool.rules.spelling.morfologik;
import org.jetbrains.annotations.Nullable;
import org.languagetool.AnalyzedSentence;
import org.languagetool.AnalyzedTokenReadings;
import org.languagetool.JLanguageTool;
import org.languagetool.Language;
import org.languagetool.rules.Categories;
import org.languagetool.rules.ITSIssueType;
import org.languagetool.rules.RuleMatch;
import org.languagetool.rules.spelling.SpellingCheckRule;
import java.io.IOException;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public abstract class MorfologikSpellerRule extends SpellingCheckRule {
protected MorfologikMultiSpeller speller1;
protected MorfologikMultiSpeller speller2;
protected Locale conversionLocale;
private boolean ignoreTaggedWords = false;
private boolean checkCompound = false;
private Pattern compoundRegex = Pattern.compile("-");
/**
* Get the filename, e.g., <tt>/resource/pl/spelling.dict</tt>.
*/
public abstract String getFileName();
@Override
public abstract String getId();
public MorfologikSpellerRule(ResourceBundle messages, Language language) throws IOException {
super(messages, language);
super.setCategory(Categories.TYPOS.getCategory(messages));
this.conversionLocale = conversionLocale != null ? conversionLocale : Locale.getDefault();
init();
setLocQualityIssueType(ITSIssueType.Misspelling);
}
@Override
public String getDescription() {
return messages.getString("desc_spelling");
}
public void setLocale(Locale locale) {
conversionLocale = locale;
}
/**
* Skip words that are known in the POS tagging dictionary, assuming they
* cannot be incorrect.
*/
public void setIgnoreTaggedWords() {
ignoreTaggedWords = true;
}
@Override
public RuleMatch[] match(AnalyzedSentence sentence) throws IOException {
List<RuleMatch> ruleMatches = new ArrayList<>();
AnalyzedTokenReadings[] tokens = getSentenceWithImmunization(sentence).getTokensWithoutWhitespace();
//lazy init
if (speller1 == null) {
String binaryDict = null;
if (JLanguageTool.getDataBroker().resourceExists(getFileName())) {
binaryDict = getFileName();
}
if (binaryDict != null) {
initSpeller(binaryDict);
} else {
// should not happen, as we only configure this rule (or rather its subclasses)
// when we have the resources:
return toRuleMatchArray(ruleMatches);
}
}
int idx = -1;
for (AnalyzedTokenReadings token : tokens) {
idx++;
if (canBeIgnored(tokens, idx, token) || token.isImmunized()) {
continue;
}
// if we use token.getToken() we'll get ignored characters inside and speller will choke
String word = token.getAnalyzedToken(0).getToken();
if (tokenizingPattern() == null) {
ruleMatches.addAll(getRuleMatches(word, token.getStartPos()));
} else {
int index = 0;
Matcher m = tokenizingPattern().matcher(word);
while (m.find()) {
String match = word.subSequence(index, m.start()).toString();
ruleMatches.addAll(getRuleMatches(match, token.getStartPos() + index));
index = m.end();
}
if (index == 0) { // tokenizing char not found
ruleMatches.addAll(getRuleMatches(word, token.getStartPos()));
} else {
ruleMatches.addAll(getRuleMatches(word.subSequence(
index, word.length()).toString(), token.getStartPos() + index));
}
}
}
return toRuleMatchArray(ruleMatches);
}
private void initSpeller(String binaryDict) throws IOException {
String plainTextDict = null;
if (JLanguageTool.getDataBroker().resourceExists(getSpellingFileName())) {
plainTextDict = getSpellingFileName();
}
if (plainTextDict != null) {
speller1 = new MorfologikMultiSpeller(binaryDict, plainTextDict, 1);
speller2 = new MorfologikMultiSpeller(binaryDict, plainTextDict, 2);
setConvertsCase(speller1.convertsCase());
} else {
throw new RuntimeException("Could not find ignore spell file in path: " + getSpellingFileName());
}
}
private boolean canBeIgnored(AnalyzedTokenReadings[] tokens, int idx, AnalyzedTokenReadings token) throws IOException {
return token.isSentenceStart() ||
token.isImmunized() ||
token.isIgnoredBySpeller() ||
isUrl(token.getToken()) ||
isEMail(token.getToken()) ||
(ignoreTaggedWords && token.isTagged()) ||
ignoreToken(tokens, idx);
}
/**
* @return true if the word is misspelled
* @since 2.4
*/
protected boolean isMisspelled(MorfologikMultiSpeller speller, String word) {
if (!speller.isMisspelled(word)) {
return false;
}
if (checkCompound) {
if (compoundRegex.matcher(word).find()) {
String[] words = compoundRegex.split(word);
for (String singleWord: words) {
if (speller.isMisspelled(singleWord)) {
return true;
}
}
return false;
}
}
return true;
}
protected List<RuleMatch> getRuleMatches(String word, int startPos) throws IOException {
List<RuleMatch> ruleMatches = new ArrayList<>();
if (isMisspelled(speller1, word) || isProhibited(word)) {
RuleMatch ruleMatch = new RuleMatch(this, startPos, startPos
+ word.length(), messages.getString("spelling"),
messages.getString("desc_spelling_short"));
List<String> suggestions = speller1.getSuggestions(word);
if (suggestions.size() == 0 && word.length() >= 5) {
// speller1 uses a maximum edit distance of 1, it won't find suggestion for "garentee", "greatful" etc.
suggestions.addAll(speller2.getSuggestions(word));
}
suggestions.addAll(0, getAdditionalTopSuggestions(suggestions, word));
suggestions.addAll(getAdditionalSuggestions(suggestions, word));
if (!suggestions.isEmpty()) {
filterSuggestions(suggestions);
ruleMatch.setSuggestedReplacements(orderSuggestions(suggestions, word));
}
ruleMatches.add(ruleMatch);
}
return ruleMatches;
}
/**
* Get the regular expression pattern used to tokenize
* the words as in the source dictionary. For example,
* it may contain a hyphen, if the words with hyphens are
* not included in the dictionary
* @return A compiled {@link Pattern} that is used to tokenize words or {@code null}.
*/
@Nullable
public Pattern tokenizingPattern() {
return null;
}
protected List<String> orderSuggestions(List<String> suggestions, String word) {
return suggestions;
}
/**
* @param checkCompound If true and the word is not in the dictionary
* it will be split (see {@link #setCompoundRegex(String)})
* and each component will be checked separately
* @since 2.4
*/
protected void setCheckCompound(boolean checkCompound) {
this.checkCompound = checkCompound;
}
/**
* @param compoundRegex see {@link #setCheckCompound(boolean)}
* @since 2.4
*/
protected void setCompoundRegex(String compoundRegex) {
this.compoundRegex = Pattern.compile(compoundRegex);
}
} |
package org.languagetool.rules.fr;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import org.languagetool.AnalyzedToken;
import org.languagetool.AnalyzedTokenReadings;
import org.languagetool.language.French;
import org.languagetool.rules.RuleMatch;
import org.languagetool.rules.patterns.RuleFilter;
import org.languagetool.synthesis.FrenchSynthesizer;
public class InterrogativeVerbFilter extends RuleFilter {
// private static final Pattern PronounSubject = Pattern.compile("R pers suj
// ([123] [sp])");
private static final FrenchSynthesizer synth = new FrenchSynthesizer(new French());
@Override
public RuleMatch acceptRuleMatch(RuleMatch match, Map<String, String> arguments, int patternTokenPos,
AnalyzedTokenReadings[] patternTokens) throws IOException {
Set<String> replacements = new HashSet<>();
String pronounFrom = getRequired("PronounFrom", arguments);
String verbFrom = getRequired("VerbFrom", arguments);
String desiredPostag = null;
if (pronounFrom != null && verbFrom != null) {
int posPronoun = Integer.parseInt(pronounFrom);
if (posPronoun < 1 || posPronoun > patternTokens.length) {
throw new IllegalArgumentException(
"ConfusionCheckFilter: Index out of bounds in " + match.getRule().getFullId() + ", PronounFrom: " + posPronoun);
}
int posVerb = Integer.parseInt(verbFrom);
if (posVerb < 1 || posVerb > patternTokens.length) {
throw new IllegalArgumentException(
"ConfusionCheckFilter: Index out of bounds in " + match.getRule().getFullId() + ", VerbFrom: " + posVerb);
}
AnalyzedTokenReadings atrVerb = patternTokens[posVerb - 1];
AnalyzedTokenReadings atrPronoun = patternTokens[posPronoun - 1];
if (atrPronoun.matchesPosTagRegex(".* 1 s")) {
desiredPostag = "V .*(ind|cond).* 1 s";
}
if (atrPronoun.matchesPosTagRegex(".* 2 s")) {
desiredPostag = "V .*(ind|cond).* 2 s";
}
if (atrPronoun.matchesPosTagRegex(".* 3 s")) {
desiredPostag = "V .*(ind|cond).* 3 s";
}
if (atrPronoun.matchesPosTagRegex(".* 1 p")) {
desiredPostag = "V .*(ind|cond).* 1 p";
}
if (atrPronoun.matchesPosTagRegex(".* 2 p")) {
desiredPostag = "V .*(ind|cond).* 2 p";
}
if (atrPronoun.matchesPosTagRegex(".* 3 p")) {
desiredPostag = "V .*(ind|cond).* 3 p";
}
if (atrVerb.matchesPosTagRegex("V .*") && desiredPostag != null) {
for (AnalyzedToken at : atrVerb) {
if (at.getPOSTag().startsWith("V ")) {
String synthesized[] = synth.synthesize(at, desiredPostag, true);
if (synthesized != null) {
replacements.addAll(Arrays.asList(synthesized));
}
}
}
}
/*TODO
else {
//if there isn't a verb try to find one with the speller
}*/
}
String message = match.getMessage();
RuleMatch ruleMatch = new RuleMatch(match.getRule(), match.getSentence(), match.getFromPos(), match.getToPos(),
message, match.getShortMessage());
ruleMatch.setType(match.getType());
if (!replacements.isEmpty()) {
ruleMatch.setSuggestedReplacements(new ArrayList<String>(replacements));
}
return ruleMatch;
}
} |
package io.networkreaders.exceptions;
/**
 * Thrown when a parse error occurs while parsing a file.
 */
public class ParseException extends Exception {

    // Exception is Serializable; pin the serial form explicitly.
    private static final long serialVersionUID = 1L;

    /** Number of the line containing the parse error (1-based). */
    private final int lineNumber;

    /**
     * Constructs a new parse exception with the specified detail message.
     *
     * @param message the detail message. The detail message is saved for
     *                later retrieval by the {@link #getMessage()} method.
     */
    public ParseException(String message) {
        super(message);
        lineNumber = 1; // by default the error is in the first line
    }

    /**
     * Constructs a new parse exception with the specified detail message and line number
     * specifying the line where the error occurred.
     *
     * @param message the detail message. The detail message is saved for
     *                later retrieval by the {@link #getMessage()} method.
     * @param lineNumber number of the line containing the parse error.
     */
    public ParseException(String message, int lineNumber) {
        super(message);
        this.lineNumber = lineNumber;
    }

    /**
     * Returns the line number of the error.
     *
     * @return the line number of the error
     */
    public int getLineNumber() {
        return lineNumber;
    }

    /**
     * Returns the detail message string of this throwable, suffixed with the
     * line number.
     *
     * @return the detail message string of this {@code Throwable} instance
     *         followed by " (in line N)".
     */
    @Override
    public String getMessage() {
        // String concatenation (unlike the original String.concat) tolerates
        // a null detail message instead of throwing a NullPointerException.
        return super.getMessage() + " (in line " + lineNumber + ")";
    }
}
package is2011.reproductor.modelo;
import is2011.biblioteca.contenedores.CancionContainer;
import is2011.reproductor.modelo.listeners.*;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import com.thoughtworks.xstream.XStream;
import com.thoughtworks.xstream.io.xml.DomDriver;
public class ListaReproduccion {
/** Playback mode of the playlist (normal, shuffle, repeat one, repeat all). */
public static enum ModoReproduccionEnum {
    NORMAL,
    ALEATORIO,
    REPETIR_UNO,
    REPETIR_TODOS
}
/** The list of songs in this playlist. */
private ArrayList<CancionContainer> listaReproduccion;
/** Index of the current song; ranges from 0 to the number of songs. */
private int actual;
/** Current playback mode (normal, shuffle, repeat one, repeat all). */
private ModoReproduccionEnum modoReproduccion;
/** Listeners observing changes to this playlist. */
private ArrayList<ListaReproduccionListener> listeners;
/** XStream instance used to read/write the playlist as XML. */
private XStream stream;
// Tracks whether the playlist has unsaved modifications.
private boolean modificado;
/**
 * Default constructor.
 * Creates an empty playlist with no listeners, NORMAL playback mode,
 * and configures the XML (de)serializer.
 */
public ListaReproduccion(){
    listaReproduccion = new ArrayList<CancionContainer>();
    actual = 0;
    modoReproduccion = ModoReproduccionEnum.NORMAL;
    listeners = new ArrayList<ListaReproduccionListener>();
    this.modificado = false;
    stream = new XStream(new DomDriver());
    // map the XML element <track> to CancionContainer
    stream.alias("track", CancionContainer.class);
}
/**
* Reinicia la lista de reproduccion. (Quita todas las canciones, actual = 0
* aleatorio lo pone a false...
*
* @param borrarOyentes booleano que indica si queremos borrar todos los
* oyentes de la lista de reproduccion.
*/
public void reiniciar(boolean borrarOyentes) {
listaReproduccion = new ArrayList<CancionContainer>();
actual = 0;
modoReproduccion = ModoReproduccionEnum.NORMAL;
//TODO comprobar
modificado = true;
if(borrarOyentes) {
listeners = new ArrayList<ListaReproduccionListener>();
}
notificaReinicio();
}
private void addCancion(CancionContainer cancion, int pos) {
listaReproduccion.add(pos, cancion);
//TODO comprobar
modificado = true;
notificaNuevaCancionAniadida(cancion, pos);
}
private void addCancionAlFinal(CancionContainer cancion) {
addCancion(cancion,listaReproduccion.size());
//TODO comprobar
modificado = true;
}
/**
* Borra una cancion y notifica a los oyentes.
* @param pos La posicion de la cancion que queremos borrar.0 es la primera
* posicion
*/
private void borrarCancion(int pos) {
if(pos < listaReproduccion.size() && pos >= 0) {
listaReproduccion.remove(pos);
notificaCancionBorrada(pos);
//TODO comprobar
modificado = true;
} else {
throw new IndexOutOfBoundsException();
}
}
public void addCancion(CancionContainer c){
this.addCancionAlFinal(c);
//TODO comprobar
modificado = true;
}
/**
* Indica si la lista esta vacia.
* @return
*/
public boolean isVacia(){
return listaReproduccion.isEmpty();
}
/**
* Devuelve el numero de canciones.
* @return
*/
public int getNumeroCanciones(){
return listaReproduccion.size();
}
/**
*
* @param pos La primera posicion es la 0
*/
public void removeCancion(int pos){
this.borrarCancion(pos);
//Si la cancion que borramos esta por debajo de actual
//O actual apuntaba a la ultima cancion
if (pos < (actual-1) || actual > this.listaReproduccion.size()) {
setActual(actual -1);
}
//TODO comprobar
modificado = true;
}
/**
*
* @param pos La primera posicion es la 0
* @return
*/
public CancionContainer getCancionAt(int pos){
return listaReproduccion.get(pos);
}
@SuppressWarnings("unchecked")
public void cargarXML(String pathYfichero) throws FileNotFoundException{
File aux = new File(pathYfichero);
if (aux.canRead()){
this.listaReproduccion = (ArrayList<CancionContainer>) stream.fromXML(new FileInputStream(pathYfichero));
modificado = true;
notificaNuevaListaReproduccion(listaReproduccion,0);
}else System.out.println("El fichero no existe");
}
public void guardarXML(String pathYfichero) throws FileNotFoundException{
if(modificado)
stream.toXML(listaReproduccion, new FileOutputStream(pathYfichero));
}
public void ordenar(Comparator<CancionContainer> orden){
if(actual == 0) {
Collections.sort(this.listaReproduccion, orden);
modificado = true;
this.notificaNuevaListaReproduccion(this.listaReproduccion, 0);
} else {
CancionContainer cancionActual = this.listaReproduccion.get(actual-1);
Collections.sort(this.listaReproduccion, orden);
modificado = true;
int indiceActual = this.listaReproduccion.indexOf(cancionActual) +1;
this.notificaNuevaListaReproduccion(this.listaReproduccion, indiceActual);
}
}
/**
* Devuelve el numero de la cancion actual. (De 1 hasta size).
* @return La cancion actual.
*/
public int getActual() {
return actual;
}
/**
* El numero de cancion actual.
* @param La cancion actual. 1 es la primera cancion.
*/
public void setActual(int actual) {
int viejo = this.actual;
this.actual = actual;
notificaCambioNumeroCancionActual(actual,viejo);
}
/**
* Devuelve el modo de reproduccion actual.
* @return el modo de reproduccion actual
*/
public ModoReproduccionEnum getModoReproduccion() {
return modoReproduccion;
}
/**
* Establece el modo de reproduccion actual y notifica a los oyentes.
* @param modoReproduccion the modoReproduccion to set
*/
public void setModoReproduccion(ModoReproduccionEnum modoReproduccion) {
this.modoReproduccion = modoReproduccion;
notificaCambioTipoReproduccion(modoReproduccion);
}
public void addListaReproduccionListener(ListaReproduccionListener listener)
{
listeners.add(listener);
}
/**
* Elimina un listener del modelo.
* @param listener El listener.
*/
public void removeListaReproduccionListener(ListaReproduccionListener
listener) {
listeners.remove(listener);
}
/**
* Notifica una nueva lista de reproduccion, es decir, que se borran todas
* las canciones.
*/
private void notificaReinicio() {
for (ListaReproduccionListener l : listeners) {
l.reinicia();
}
}
private void notificaNuevaCancionAniadida(CancionContainer c, int pos) {
for (ListaReproduccionListener l : listeners) {
l.nuevaCancion(new NuevaCancionEvent(c.getTitulo(),c.getAlbum(),
c.getPista(),c.getArtista(), c.getGenero(),c.getDuracion(), pos));
}
}
/**
* Notifica que se ha cambiado de cancion actual.
* @param actual La nueva cancion actual.
*/
private void notificaCambioNumeroCancionActual(int actualNuevo, int actualViejo) {
for (ListaReproduccionListener l : listeners) {
l.setActual(actualNuevo, actualViejo);
}
}
/**
* Indica que se encuentra en reproduccion aleatoria.
* @param aleatorio. Indica si aleatorio es true o false.
*/
private void notificaCambioTipoReproduccion(ModoReproduccionEnum modo) {
for (ListaReproduccionListener l : listeners) {
l.cambioTipoReproduccion(modo);
}
}
/**
* @param pos 0 es la primera posicion.
*/
private void notificaCancionBorrada(int pos) {
for (ListaReproduccionListener l : listeners) {
l.borrarCancion(new BorrarCancionEvent(pos));
}
}
/**
* Le dice a la vista que borre todas las canciones cargadas
* y que cargue la nueva.
* @param
*/
private void notificaNuevaListaReproduccion(ArrayList<CancionContainer> c
,int indiceActual) {
for (ListaReproduccionListener l : listeners) {
l.reinicia();
l.nuevaListaReproduccion(c);
}
this.setActual(indiceActual);
}
} |
package io.compgen.cgpipe.runner.joblog;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.PrintStream;
import java.security.SecureRandom;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import io.compgen.common.Pair;
public class JobLog {
static Map<String, JobLog> instances = new HashMap<String, JobLog>();
static protected Log log = LogFactory.getLog(JobLog.class);
// private File lockFile = null;
private final String filename;
// private final String lockSecret = generateRandomString();
// protected List<String> jobIds = new ArrayList<String>();
// protected Map<String, JobLogRecord> records = new HashMap<String,JobLogRecord>();
protected Map<String, String> outputs = new HashMap<String,String>(); // output, jobid
protected JobLog(String filename) throws IOException {
this.filename = filename;
File jobfile = new File(filename);
acquireLock();
if (jobfile.exists()) {
BufferedReader reader = new BufferedReader(new InputStreamReader(new FileInputStream(filename)));
String line;
while ((line = reader.readLine()) != null) {
String[] cols = line.split("\t", 3);
String jobid = cols[0];
String key = cols[1];
String arg1 = cols[2];
if (key.equals("OUTPUT")) {
log.debug("Existing output/job: " + arg1 + " => " + jobid);
outputs.put(arg1, jobid);
}
// String arg2 = null;
// if (key.equals("SETTING")) {
// cols = line.split("\t", 4);
// arg2 = cols[3];
// if (!records.containsKey(jobid)) {
// records.put(jobid, new JobLogRecord(jobid));
// jobIds.add(jobid);
// JobLogRecord rec = records.get(jobid);
// switch(key) {
// case "NAME":
// rec.setName(arg1);
// break;
// case "RETCODE":
// rec.setReturnCode(Integer.parseInt(arg1));
// break;
// case "SUBMIT":
// rec.setSubmitTime(Long.parseLong(arg1));
// break;
// case "START":
// rec.setStartTime(Long.parseLong(arg1));
// break;
// case "END":
// rec.setEndTime(Long.parseLong(arg1));
// break;
// case "SETTING":
// rec.addSetting(arg1, arg2);
// break;
// case "OUTPUT":
// outputs.put(arg1, jobid);
// rec.addOutput(arg1);
// break;
// case "INPUT":
// rec.addInput(arg1);
// break;
// case "DEP":
// rec.addDep(arg1);
// break;
// case "SRC":
// rec.addSrcLine(arg1);
// break;
// default:
// break;
}
reader.close();
} else if (jobfile.getParentFile() != null && !jobfile.getParentFile().exists()) {
jobfile.getParentFile().mkdirs();
jobfile.createNewFile();
}
}
protected void releaseLock() {
// if (lockFile != null) {
// File child = new File(lockFile, "lock");
// if (child.exists()) {
// try {
// BufferedReader reader = new BufferedReader(new FileReader(child));
// String s = reader.readLine();
// reader.close();
// if (!lockSecret.equals(s)) {
// // we don't own the lock, don't release
// return;
// } catch (IOException e) {
// return;
// child.delete();
// lockFile.delete();
// log.debug("job-log lock released");
}
protected void acquireLock() throws IOException {
//acquireLock(30000);
}
protected void acquireLock(long wait_ms) throws IOException {
return;
// log.debug("Trying to get job-log lock: " + filename);
// if (lockFile != null) {
// log.trace("Lock already acquired!");
// return;
// long ms = System.currentTimeMillis();
// long end = ms + wait_ms;
// long wait = 10;
// boolean first = true;
// while (lockFile == null && System.currentTimeMillis() < end) {
// if (!first) {
// try {
// log.trace("waiting to try to establish lock");
// Thread.sleep(wait);
// } catch (InterruptedException e) {
// if (wait < 100) {
// wait = wait * 2;
// first = false;
// boolean good = false;
// File dir = new File(filename+".lock");
// File child = new File(dir, "lock");
// if (!dir.exists() ) {
// dir.mkdirs();
// try {
// if (!child.exists()) {
// child.createNewFile();
// PrintStream ps = new PrintStream(new FileOutputStream(child));
// ps.println(lockSecret);
// ps.flush();
// ps.close();
// Thread.sleep(100);
// BufferedReader reader = new BufferedReader(new FileReader(child));
// String s = reader.readLine();
// reader.close();
// if (lockSecret.equals(s)) {
// // we own the lock!
// good = true;
// } else {
// log.debug("tried to create the lock, but we got beat... waiting");
// } catch (IOException e) {
// good = false;
// } catch (InterruptedException e) {
// good = false;
// if (good) {
// log.debug("job-log lock acquired");
// lockFile = dir;
// Runtime.getRuntime().addShutdownHook(new Thread() {
// public void run() {
// releaseLock();
// dir.deleteOnExit();
// child.deleteOnExit();
// if (lockFile == null) {
// log.error("Could not get a lock on job-log: "+filename);
// System.exit(2);
}
public static JobLog open(String filename) throws IOException {
if (instances.containsKey(filename)) {
return instances.get(filename);
} else {
instances.put(filename, new JobLog(filename));
return instances.get(filename);
}
}
public String getJobIdForOutput(String output) {
if (outputs.containsKey(output)) {
log.debug("Looking for an existing job for file: "+output+", found job-id: "+outputs.get(output));
return outputs.get(output);
}
log.debug("Looking for an existing job file: "+output+", not found.");
return null;
}
public Map<String, String> getOutputJobIds() {
return Collections.unmodifiableMap(outputs);
}
public void close() {
releaseLock();
instances.remove(filename);
}
public void writeRecord(JobLogRecord rec) {
PrintStream ps = null;
try {
ps = new PrintStream(new FileOutputStream(filename, true));
} catch (FileNotFoundException e) {
log.error("Missing job log??? (this should have been created) -- " + filename);
return;
}
if (rec.getName()!=null) {
ps.println(rec.getJobId()+"\tNAME\t"+rec.getName());
}
if (rec.getUser()!=null) {
ps.println(rec.getJobId()+"\tUSER\t"+rec.getUser());
}
if (rec.getReturnCode()!=null) {
ps.println(rec.getJobId()+"\tRETCODE\t"+rec.getReturnCode());
}
if (rec.getSubmitTime()!=null) {
ps.println(rec.getJobId()+"\tSUBMIT\t"+rec.getSubmitTime());
}
if (rec.getStartTime()!=null) {
ps.println(rec.getJobId()+"\tSTART\t"+rec.getStartTime());
}
if (rec.getEndTime()!=null) {
ps.println(rec.getJobId()+"\tEND\t"+rec.getEndTime());
}
if (rec.getDeps() != null) {
for (String dep: rec.getDeps()) {
ps.println(rec.getJobId()+"\tDEP\t"+dep);
}
}
if (rec.getOutputs() != null) {
for (String out: rec.getOutputs()) {
ps.println(rec.getJobId()+"\tOUTPUT\t"+out);
}
}
if (rec.getInputs() != null) {
for (String inp: rec.getInputs()) {
ps.println(rec.getJobId()+"\tINPUT\t"+inp);
}
}
if (rec.getSrcLines() != null) {
for (String s: rec.getSrcLines()) {
ps.println(rec.getJobId()+"\tSRC\t"+s);
}
}
if (rec.getSettings() != null) {
for (Pair<String, String> p: rec.getSettings()) {
ps.println(rec.getJobId()+"\tSETTING\t"+p.one+"\t"+p.two);
}
}
ps.flush();
ps.close();
}
public void writeStartTime(String jobId) {
PrintStream ps = null;
try {
ps = new PrintStream(new FileOutputStream(filename, true));
} catch (FileNotFoundException e) {
System.err.println("Missing job log??? (this should have been created) -- " + filename);
return;
}
ps.println(jobId+"\tSTART\t"+System.currentTimeMillis());
ps.flush();
ps.close();
}
public void writeEndTime(String jobId, int retcode) {
PrintStream ps = null;
try {
ps = new PrintStream(new FileOutputStream(filename, true));
} catch (FileNotFoundException e) {
System.err.println("Missing job log??? (this should have been created) -- " + filename);
return;
}
ps.println(jobId+"\tEND\t"+System.currentTimeMillis());
ps.println(jobId+"\tRETCODE\t"+retcode);
ps.flush();
ps.close();
}
public static final String UPPER="ABCDEFGHIJKLMNOPQRSTUVWXYZ";
public static final String LOWER="abcdefghijklmnopqrstuvwxyz";
public static final String NUM="0123456789";
public static final String generateRandomString() {
return generateRandomString(24, UPPER+LOWER+NUM);
}
public static final String generateRandomString(int length, String pool) {
SecureRandom rand = new SecureRandom();
String s = "";
while (s.length() < length) {
int next = rand.nextInt(pool.length());
s += pool.charAt(next);
}
return s;
}
} |
package org.apache.commons.lang.math;
import java.util.Random;
/**
* <p><code>JVMRandom</code> is a wrapper that supports all possible
* Random methods via the java.lang.Math.random() method and its system-wide
* Random object.
*
* @author Henri Yandell
* @since 2.0
* @version $Id: JVMRandom.java,v 1.3 2003/05/12 04:29:26 bayard Exp $
*/
public final class JVMRandom extends Random {
// important to not call super() as this will
// call setSeed with the current Time
public JVMRandom() {
}
public synchronized void setSeed(long seed) {
throw new UnsupportedOperationException();
}
public synchronized double nextGaussian() {
throw new UnsupportedOperationException();
}
public void nextBytes(byte[] byteArray) {
throw new UnsupportedOperationException();
}
/**
* Returns the next pseudorandom, uniformly distributed int value
* from the Math.random() sequence.
*
* @return the random int
*/
public int nextInt() {
return nextInt(Integer.MAX_VALUE);
}
/**
* Returns a pseudorandom, uniformly distributed int value between 0
* (inclusive) and the specified value (exclusive), from the
* Math.random() sequence.
*
* @param n the specified exclusive max-value
*
* @return the random int
*/
public int nextInt(int n) {
// check this cannot return 'n'
return (int)(Math.random() * n);
}
/**
* Returns the next pseudorandom, uniformly distributed long value
* from the Math.random() sequence.
*
* @return the random long
*/
public long nextLong() {
// possible loss of precision?
return (long)(Math.random() * Long.MAX_VALUE);
}
/**
* Returns the next pseudorandom, uniformly distributed boolean value
* from the Math.random() sequence.
*
* @return the random boolean
*/
public boolean nextBoolean() {
return (Math.random() > 0.5);
}
/**
* Returns the next pseudorandom, uniformly distributed float value
* between 0.0 and 1.0 from the Math.random() sequence.
*
* @return the random float
*/
public float nextFloat() {
return (float)Math.random();
}
/**
* Synonymous to the Math.random() call.
*
* @return the random double
*/
public double nextDouble() {
return Math.random();
}
} |
package gov.nih.nci.cananolab.ui.core;
import gov.nih.nci.cananolab.domain.particle.NanoparticleSample;
import gov.nih.nci.cananolab.dto.common.LabFileBean;
import gov.nih.nci.cananolab.dto.common.UserBean;
import gov.nih.nci.cananolab.dto.particle.ParticleBean;
import gov.nih.nci.cananolab.exception.CaNanoLabSecurityException;
import gov.nih.nci.cananolab.exception.FileException;
import gov.nih.nci.cananolab.service.common.FileService;
import gov.nih.nci.cananolab.service.particle.NanoparticleSampleService;
import gov.nih.nci.cananolab.service.security.AuthorizationService;
import gov.nih.nci.cananolab.ui.particle.InitNanoparticleSetup;
import gov.nih.nci.cananolab.ui.security.InitSecuritySetup;
import gov.nih.nci.cananolab.util.CaNanoLabConstants;
import gov.nih.nci.cananolab.util.ClassUtils;
import gov.nih.nci.cananolab.util.DataLinkBean;
import gov.nih.nci.cananolab.util.PropertyReader;
import java.io.File;
import java.io.FileInputStream;
import java.util.List;
import java.util.Map;
import java.util.SortedSet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import org.apache.struts.action.ActionForm;
import org.apache.struts.action.ActionForward;
import org.apache.struts.action.ActionMapping;
import org.apache.struts.action.ActionMessage;
import org.apache.struts.action.ActionMessages;
import org.apache.struts.validator.DynaValidatorForm;
/**
* Base action for all annotation actions
*
* @author pansu
*
*/
public abstract class BaseAnnotationAction extends AbstractDispatchAction {
public ParticleBean setupParticle(DynaValidatorForm theForm,
HttpServletRequest request) throws Exception {
String particleId = request.getParameter("particleId");
if (particleId == null) {
particleId = theForm.getString("particleId");
}
HttpSession session = request.getSession();
UserBean user = (UserBean) session.getAttribute("user");
NanoparticleSampleService service = new NanoparticleSampleService();
ParticleBean particleBean = service
.findNanoparticleSampleById(particleId);
request.setAttribute("theParticle", particleBean);
InitNanoparticleSetup.getInstance().getOtherParticleNames(
request,
particleBean.getDomainParticleSample().getName(),
particleBean.getDomainParticleSample().getSource()
.getOrganizationName(), user);
return particleBean;
}
protected void setupDomainFiles(List<LabFileBean> files,
String particleSampleName, String createdBy, String submitType)
throws Exception {
// setup domainFile for fileBeans
for (LabFileBean fileBean : files) {
String internalUri = InitSetup.getInstance()
.getFileUriFromFormFile(fileBean.getUploadedFile(),
CaNanoLabConstants.FOLDER_PARTICLE,
particleSampleName, submitType);
fileBean.setInternalUri(internalUri);
fileBean.setupDomainFile(createdBy);
}
}
protected void saveFilesToFileSystem(List<LabFileBean> files)
throws Exception {
// save file data to file system and set visibility
AuthorizationService authService = new AuthorizationService(
CaNanoLabConstants.CSM_APP_NAME);
FileService fileService = new FileService();
for (LabFileBean fileBean : files) {
fileService.writeFile(fileBean.getDomainFile(), fileBean
.getFileData());
authService.assignVisibility(fileBean.getDomainFile().getId()
.toString(), fileBean.getVisibilityGroups());
}
}
public boolean loginRequired() {
return false;
}
public boolean canUserExecute(UserBean user)
throws CaNanoLabSecurityException {
return InitSecuritySetup.getInstance().userHasCreatePrivilege(user,
CaNanoLabConstants.CSM_PG_PARTICLE);
}
public Map<String, SortedSet<DataLinkBean>> setupDataTree(
DynaValidatorForm theForm, HttpServletRequest request)
throws Exception {
request.setAttribute("updateDataTree", "true");
String particleId = request.getParameter("particleId");
if (particleId == null) {
if (theForm.getMap().containsKey("particleSampleBean")) {
particleId = ((ParticleBean) theForm.get("particleSampleBean"))
.getDomainParticleSample().getId().toString();
} else {
particleId = theForm.getString("particleId");
}
}
InitSetup.getInstance()
.getDefaultAndOtherLookupTypes(request, "reportCategories",
"Report", "category", "otherCategory", true);
return InitNanoparticleSetup.getInstance().getDataTree(particleId,
request);
}
public ActionForward setupDeleteAll(ActionMapping mapping, ActionForm form,
HttpServletRequest request, HttpServletResponse response)
throws Exception {
String submitType = request.getParameter("submitType");
DynaValidatorForm theForm = (DynaValidatorForm) form;
Map<String, SortedSet<DataLinkBean>> dataTree = setupDataTree(
theForm, request);
SortedSet<DataLinkBean> dataToDelete = dataTree.get(submitType);
request.getSession().setAttribute("actionName",
dataToDelete.first().getDataLink());
request.getSession().setAttribute("dataToDelete", dataToDelete);
return mapping.findForward("annotationDeleteView");
}
// check for cases where delete can't happen
protected boolean checkDelete(HttpServletRequest request,
ActionMessages msgs, String id) throws Exception {
return true;
}
public ActionForward deleteAll(ActionMapping mapping, ActionForm form,
HttpServletRequest request, HttpServletResponse response)
throws Exception {
DynaValidatorForm theForm = (DynaValidatorForm) form;
String submitType = request.getParameter("submitType");
String className = InitSetup.getInstance().getObjectName(submitType,
request.getSession().getServletContext());
String fullClassName = ClassUtils.getFullClass(className)
.getCanonicalName();
String[] dataIds = (String[]) theForm.get("idsToDelete");
NanoparticleSampleService sampleService = new NanoparticleSampleService();
ActionMessages msgs = new ActionMessages();
for (String id : dataIds) {
if (!checkDelete(request, msgs, id)) {
return mapping.findForward("annotationDeleteView");
}
sampleService.deleteAnnotationById(fullClassName, new Long(id));
}
setupDataTree(theForm, request);
ActionMessage msg = new ActionMessage("message.deleteAnnotations",
submitType);
msgs.add(ActionMessages.GLOBAL_MESSAGE, msg);
saveMessages(request, msgs);
return mapping.findForward("success");
}
/**
* Download action to handle file downloading and viewing
*
* @param
* @return
*/
public ActionForward download(ActionMapping mapping, ActionForm form,
HttpServletRequest request, HttpServletResponse response)
throws Exception {
String fileId = request.getParameter("fileId");
UserBean user = (UserBean) request.getSession().getAttribute("user");
FileService service = new FileService();
LabFileBean fileBean = service.findFile(fileId, user);
if (fileBean.getDomainFile().getUriExternal()) {
response.sendRedirect(fileBean.getDomainFile().getUri());
return null;
}
String fileRoot = PropertyReader.getProperty(
CaNanoLabConstants.FILEUPLOAD_PROPERTY, "fileRepositoryDir");
File dFile = new File(fileRoot + File.separator
+ fileBean.getDomainFile().getUri());
if (dFile.exists()) {
response.setContentType("application/octet-stream");
response.setHeader("Content-disposition", "attachment;filename="
+ fileBean.getDomainFile().getName());
response.setHeader("cache-control", "Private");
java.io.InputStream in = new FileInputStream(dFile);
java.io.OutputStream out = response.getOutputStream();
byte[] bytes = new byte[32768];
int numRead = 0;
while ((numRead = in.read(bytes)) > 0) {
out.write(bytes, 0, numRead);
}
out.close();
} else {
ActionMessages msgs = new ActionMessages();
ActionMessage msg = new ActionMessage("error.noFile");
msgs.add(ActionMessages.GLOBAL_MESSAGE, msg);
this.saveErrors(request, msgs);
throw new FileException("File " + fileBean.getDomainFile().getUri()
+ " doesn't exist on the server");
}
return null;
}
protected NanoparticleSample[] prepareCopy(HttpServletRequest request,
DynaValidatorForm theForm) throws Exception {
String[] otherParticles = (String[]) theForm.get("otherParticles");
if (otherParticles.length == 0) {
return null;
}
NanoparticleSample[] particleSamples = new NanoparticleSample[otherParticles.length];
NanoparticleSampleService sampleService = new NanoparticleSampleService();
int i = 0;
for (String other : otherParticles) {
NanoparticleSample particleSample = sampleService
.findNanoparticleSampleByName(other);
particleSamples[i] = particleSample;
i++;
}
// retrieve file contents
// FileService fileService = new FileService();
// for (DerivedBioAssayDataBean file : entityBean.getFiles()) {
// byte[] content = fileService.getFileContent(new Long(file.getId()));
// file.setFileContent(content);
// NanoparticleSampleService service = new NanoparticleSampleService();
// UserBean user = (UserBean) request.getSession().getAttribute("user");
// int i = 0;
// for (String particleName : otherParticles) {
// NanoparticleEntityBean newEntityBean = entityBean.copy();
// // overwrite particle
// ParticleBean otherParticle = service.findNanoparticleSampleByName(
// particleName, user);
// newrBean.setParticle(otherParticle);
// // reset view title
// String timeStamp = StringUtils.convertDateToString(new Date(),
// "MMddyyHHmmssSSS");
// String autoTitle =
// CaNanoLabConstants.AUTO_COPY_CHARACTERIZATION_VIEW_TITLE_PREFIX
// + timeStamp;
// newCharBean.setViewTitle(autoTitle);
// List<DerivedBioAssayDataBean> dataList = newCharBean
// .getDerivedBioAssayDataList();
// // replace particleName in path and uri with new particleName
// for (DerivedBioAssayDataBean derivedBioAssayData : dataList) {
// String origUri = derivedBioAssayData.getUri();
// if (origUri != null)
// derivedBioAssayData.setUri(origUri.replace(particle
// .getSampleName(), particleName));
// charBeans[i] = newCharBean;
return particleSamples;
}
} |
package org.apache.lucene.search;
import java.io.IOException;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Term;
/** Subclass of FilteredTermEnum for enumerating all terms that are similiar to the specified filter term.
  <p>Term enumerations are always ordered by Term.compareTo(). Each term in
  the enumeration is greater than all that precede it. */
public final class FuzzyTermEnum extends FilteredTermEnum {
    // Normalized similarity of the most recently compared term, in (-inf, 1];
    // written by termCompare() and read by difference().
    double distance;
    boolean fieldMatch = false;
    // Set once a term from a different field is seen; terms are enumerated in
    // Term.compareTo() order, so that ends the enumeration.
    boolean endEnum = false;

    // The term being fuzzily matched, plus cached copies of its field, text
    // and text length (avoids repeated accessor calls in termCompare).
    Term searchTerm = null;
    String field = "";
    String text = "";
    int textlen;

    /**
     * Builds the enumeration, positioned at the start of the search term's
     * field (empty text), so every term of that field is compared.
     *
     * @param reader the index to enumerate terms from.
     * @param term the term to fuzzily match against.
     */
    public FuzzyTermEnum(IndexReader reader, Term term) throws IOException {
        super(reader, term);
        searchTerm = term;
        field = searchTerm.field();
        text = searchTerm.text();
        textlen = text.length();
        setEnum(reader.terms(new Term(searchTerm.field(), "")));
    }

    /**
     The termCompare method in FuzzyTermEnum uses Levenshtein distance to
     calculate the distance between the given term and the comparing term.
     */
    protected final boolean termCompare(Term term) {
        // NOTE(review): '==' (reference comparison) appears to rely on field
        // name strings being interned — confirm before changing to equals().
        if (field == term.field()) {
            String target = term.text();
            int targetlen = target.length();
            int dist = editDistance(text, target, textlen, targetlen);
            // Normalize by the shorter of the two lengths; only terms whose
            // similarity exceeds FUZZY_THRESHOLD are accepted.
            distance = 1 - ((double)dist / (double)Math.min(textlen, targetlen));
            return (distance > FUZZY_THRESHOLD);
        }
        // Different field: terms are sorted by field first, so we are done.
        endEnum = true;
        return false;
    }

    /**
     * Scales the last computed similarity into [0, 1) relative to the
     * acceptance threshold, for use as the term's score boost.
     */
    protected final float difference() {
        return (float)((distance - FUZZY_THRESHOLD) * SCALE_FACTOR);
    }

    /** @return true once a term outside the search field has been reached. */
    public final boolean endEnum() {
        return endEnum;
    }

    // Minimum normalized similarity for a term to match.
    public static final double FUZZY_THRESHOLD = 0.5;
    // Maps similarities in (FUZZY_THRESHOLD, 1] onto (0, 1] for difference().
    public static final double SCALE_FACTOR = 1.0f / (1.0f - FUZZY_THRESHOLD);

    /**
     Finds and returns the smallest of three integers
     */
    private static final int min(int a, int b, int c) {
        int t = (a < b) ? a : b;
        return (t < c) ? t : c;
    }

    /**
     * This static array saves us from the time required to create a new array
     * everytime editDistance is called. It is grown as needed and reused
     * across calls (note: this makes editDistance non-reentrant).
     */
    private int e[][] = new int[1][1];

    /**
     Levenshtein distance also known as edit distance is a measure of similiarity
     between two strings where the distance is measured as the number of character
     deletions, insertions or substitutions required to transform one string to
     the other string.
     <p>This method takes in four parameters; two strings and their respective
     lengths to compute the Levenshtein distance between the two strings.
     The result is returned as an integer.
     */
    private final int editDistance(String s, String t, int n, int m) {
        // Grow the shared scratch matrix if it is too small for this pair.
        if (e.length <= n || e[0].length <= m) {
            e = new int[Math.max(e.length, n+1)][Math.max(e[0].length, m+1)];
        }
        int d[][] = e; // matrix
        int i; // iterates through s
        int j; // iterates through t
        char s_i; // ith character of s

        // Trivial cases: one string empty — distance is the other's length.
        if (n == 0) return m;
        if (m == 0) return n;

        // init matrix d
        for (i = 0; i <= n; i++) d[i][0] = i;
        for (j = 0; j <= m; j++) d[0][j] = j;

        // start computing edit distance
        for (i = 1; i <= n; i++) {
            s_i = s.charAt(i - 1);
            for (j = 1; j <= m; j++) {
                if (s_i != t.charAt(j-1))
                    d[i][j] = min(d[i-1][j], d[i][j-1], d[i-1][j-1])+1;
                else d[i][j] = min(d[i-1][j]+1, d[i][j-1]+1, d[i-1][j-1]);
            }
        }

        // we got the result!
        return d[n][m];
    }

    /** Closes the underlying term enumeration and drops cached references. */
    public void close() throws IOException {
        super.close();
        searchTerm = null;
        field = null;
        text = null;
    }
}
package org.jdesktop.swingx.plaf;
import java.awt.Container;
import javax.swing.JComponent;
import javax.swing.plaf.PanelUI;
/**
 * Pluggable look-and-feel delegate for a titled panel: a panel whose title
 * bar can carry extra decoration components on either side.
 *
 * @author rbair
 */
public abstract class TitledPanelUI extends PanelUI {
    /**
     * Adds the given JComponent as a decoration on the right of the title
     *
     * @param decoration the component to place on the right side of the title bar
     */
    public abstract void addRightDecoration(JComponent decoration);
    /**
     * Adds the given JComponent as a decoration on the left of the title
     *
     * @param decoration the component to place on the left side of the title bar
     */
    public abstract void addLeftDecoration(JComponent decoration);
    /**
     * @return the Container acting as the title bar for this component
     */
    public abstract Container getTitleBar();
}
package com.cloud.storage.dao;
import java.util.Date;
import java.util.List;
import javax.ejb.Local;
import javax.persistence.EntityExistsException;
import org.apache.log4j.Logger;
import com.cloud.storage.DiskOfferingVO;
import com.cloud.storage.DiskOfferingVO.Type;
import com.cloud.utils.db.Attribute;
import com.cloud.utils.db.Filter;
import com.cloud.utils.db.GenericDaoBase;
import com.cloud.utils.db.SearchBuilder;
import com.cloud.utils.db.SearchCriteria;
import com.cloud.utils.db.SearchCriteria.Op;
@Local(value={DiskOfferingDao.class})
public class DiskOfferingDaoImpl extends GenericDaoBase<DiskOfferingVO, Long> implements DiskOfferingDao {
private static final Logger s_logger = Logger.getLogger(DiskOfferingDaoImpl.class);
private final SearchBuilder<DiskOfferingVO> DomainIdSearch;
private final SearchBuilder<DiskOfferingVO> PrivateDiskOfferingSearch;
private final SearchBuilder<DiskOfferingVO> PublicDiskOfferingSearch;
protected final SearchBuilder<DiskOfferingVO> UniqueNameSearch;
private final Attribute _typeAttr;
/**
 * Builds the reusable search templates (by domain id, private offering by
 * disk size, public offering, unique name) and caches the "type" attribute
 * used to constrain generic searches to disk offerings.
 */
protected DiskOfferingDaoImpl() {
    // Offerings of a given domain, excluding removed rows.
    DomainIdSearch = createSearchBuilder();
    DomainIdSearch.and("domainId", DomainIdSearch.entity().getDomainId(), SearchCriteria.Op.EQ);
    DomainIdSearch.and("removed", DomainIdSearch.entity().getRemoved(), SearchCriteria.Op.NULL);
    DomainIdSearch.done();

    // Private offerings are matched on an exact disk size.
    PrivateDiskOfferingSearch = createSearchBuilder();
    PrivateDiskOfferingSearch.and("diskSize", PrivateDiskOfferingSearch.entity().getDiskSize(), SearchCriteria.Op.EQ);
    PrivateDiskOfferingSearch.done();

    // Public offerings: no domain, filtered on system-use, excluding removed rows.
    PublicDiskOfferingSearch = createSearchBuilder();
    PublicDiskOfferingSearch.and("domainId", PublicDiskOfferingSearch.entity().getDomainId(), SearchCriteria.Op.NULL);
    PublicDiskOfferingSearch.and("system", PublicDiskOfferingSearch.entity().getSystemUse(), SearchCriteria.Op.EQ);
    PublicDiskOfferingSearch.and("removed", PublicDiskOfferingSearch.entity().getRemoved(), SearchCriteria.Op.NULL);
    PublicDiskOfferingSearch.done();

    // Lookup by unique name.
    UniqueNameSearch = createSearchBuilder();
    UniqueNameSearch.and("name", UniqueNameSearch.entity().getUniqueName(), SearchCriteria.Op.EQ);
    UniqueNameSearch.done();

    _typeAttr = _allAttributes.get("type");
}
/**
 * Lists the non-removed disk offerings attached to the given domain.
 *
 * @param domainId id of the domain whose offerings are wanted.
 * @return the matching offerings.
 */
@Override
public List<DiskOfferingVO> listByDomainId(long domainId) {
    final SearchCriteria<DiskOfferingVO> criteria = DomainIdSearch.create();
    criteria.setParameters("domainId", domainId);
    // FIXME: this should not be exact match, but instead should find all available disk offerings from parent domains
    return listBy(criteria);
}
@Override
public List<DiskOfferingVO> findPrivateDiskOffering() {
SearchCriteria<DiskOfferingVO> sc = PrivateDiskOfferingSearch.create();
sc.setParameters("diskSize", 0);
return listBy(sc);
}
@Override
public List<DiskOfferingVO> searchIncludingRemoved(SearchCriteria<DiskOfferingVO> sc, final Filter filter, final Boolean lock, final boolean cache) {
sc.addAnd(_typeAttr, Op.EQ, Type.Disk);
return super.searchIncludingRemoved(sc, filter, lock, cache);
}
@Override
public <K> List<K> customSearchIncludingRemoved(SearchCriteria<K> sc, final Filter filter) {
sc.addAnd(_typeAttr, Op.EQ, Type.Disk);
return super.customSearchIncludingRemoved(sc, filter);
}
@Override
protected List<DiskOfferingVO> executeList(final String sql, final Object... params) {
StringBuilder builder = new StringBuilder(sql);
int index = builder.indexOf("WHERE");
if (index == -1) {
builder.append(" WHERE type=?");
} else {
builder.insert(index + 6, "type=? ");
}
return super.executeList(sql, Type.Disk, params);
}
@Override
public List<DiskOfferingVO> findPublicDiskOfferings(){
SearchCriteria<DiskOfferingVO> sc = PublicDiskOfferingSearch.create();
sc.setParameters("system", false);
return listBy(sc);
}
@Override
public DiskOfferingVO findByUniqueName(String uniqueName) {
SearchCriteria<DiskOfferingVO> sc = UniqueNameSearch.create();
sc.setParameters("name", uniqueName);
List<DiskOfferingVO> vos = search(sc, null, null, false);
if (vos.size() == 0) {
return null;
}
return vos.get(0);
}
@Override
public DiskOfferingVO persistDeafultDiskOffering(DiskOfferingVO offering) {
assert offering.getUniqueName() != null : "unique name shouldn't be null for the disk offering";
DiskOfferingVO vo = findByUniqueName(offering.getUniqueName());
if (vo != null) {
return vo;
}
try {
return persist(offering);
} catch (EntityExistsException e) {
// Assume it's conflict on unique name
return findByUniqueName(offering.getUniqueName());
}
}
@Override
public boolean remove(Long id) {
DiskOfferingVO diskOffering = createForUpdate();
diskOffering.setRemoved(new Date());
return update(id, diskOffering);
}
} |
package io.bitsquare.app;
import ch.qos.logback.classic.Logger;
import com.google.inject.Guice;
import com.google.inject.Injector;
import io.bitsquare.alert.AlertManager;
import io.bitsquare.arbitration.ArbitratorManager;
import io.bitsquare.btc.WalletService;
import io.bitsquare.common.UserThread;
import io.bitsquare.common.handlers.ResultHandler;
import io.bitsquare.common.util.Utilities;
import io.bitsquare.gui.SystemTray;
import io.bitsquare.gui.common.UITimer;
import io.bitsquare.gui.common.view.CachingViewLoader;
import io.bitsquare.gui.common.view.View;
import io.bitsquare.gui.common.view.ViewLoader;
import io.bitsquare.gui.common.view.guice.InjectorViewFactory;
import io.bitsquare.gui.main.MainView;
import io.bitsquare.gui.main.MainViewModel;
import io.bitsquare.gui.main.debug.DebugView;
import io.bitsquare.gui.main.overlays.popups.Popup;
import io.bitsquare.gui.main.overlays.windows.EmptyWalletWindow;
import io.bitsquare.gui.main.overlays.windows.SendAlertMessageWindow;
import io.bitsquare.gui.util.ImageUtil;
import io.bitsquare.p2p.P2PService;
import io.bitsquare.storage.Storage;
import io.bitsquare.trade.offer.OpenOfferManager;
import javafx.application.Application;
import javafx.application.Platform;
import javafx.scene.Parent;
import javafx.scene.Scene;
import javafx.scene.control.Label;
import javafx.scene.image.Image;
import javafx.scene.input.KeyCode;
import javafx.scene.input.KeyCodeCombination;
import javafx.scene.input.KeyCombination;
import javafx.scene.input.KeyEvent;
import javafx.scene.layout.Pane;
import javafx.scene.layout.StackPane;
import javafx.stage.Modality;
import javafx.stage.Stage;
import javafx.stage.StageStyle;
import org.bitcoinj.store.BlockStoreException;
import org.bouncycastle.jce.provider.BouncyCastleProvider;
import org.controlsfx.dialog.Dialogs;
import org.reactfx.EventStreams;
import org.slf4j.LoggerFactory;
import org.springframework.core.env.Environment;
import java.io.IOException;
import java.nio.file.Paths;
import java.security.Security;
import java.util.ArrayList;
import java.util.List;
import static io.bitsquare.app.BitsquareEnvironment.APP_NAME_KEY;
public class BitsquareApp extends Application {
    private static final Logger log = (ch.qos.logback.classic.Logger) LoggerFactory.getLogger(BitsquareApp.class);

    // Build-time switches: DEV_MODE enables the debug-window shortcut below;
    // IS_RELEASE_VERSION controls log verbosity in start().
    public static final boolean DEV_MODE = false;
    public static final boolean IS_RELEASE_VERSION = !DEV_MODE && true;

    // Injected before the JavaFX launcher calls start(); read only in start().
    private static Environment env;
    private BitsquareAppModule bitsquareAppModule;
    private Injector injector;
    // Guards against stacking multiple error popups at once.
    private boolean popupOpened;
    private static Stage primaryStage;
    private Scene scene;
    // Accumulates file names reported by the storage-corruption handler so the
    // main view can display them once it is loaded.
    private final List<String> corruptedDatabaseFiles = new ArrayList<>();
    private MainView mainView;
    // Exposed so other components (e.g. the system tray) can trigger shutdown.
    public static Runnable shutDownHandler;

    /** Must be called before launch(); stores the Spring environment read in start(). */
    public static void setEnvironment(Environment env) {
        BitsquareApp.env = env;
    }

    /**
     * JavaFX entry point: sets up logging, exception handling, Guice wiring,
     * the main scene, keyboard shortcuts and the primary stage.
     */
    @Override
    public void start(Stage primaryStage) throws IOException {
        String logPath = Paths.get(env.getProperty(BitsquareEnvironment.APP_DATA_DIR_KEY), "bitsquare").toString();
        Log.setup(logPath, !IS_RELEASE_VERSION);
        log.info("Log files under: " + logPath);
        Version.printVersion();
        // Route UserThread work onto the JavaFX application thread.
        UserThread.setExecutor(Platform::runLater);
        UserThread.setTimerClass(UITimer.class);
        // setup UncaughtExceptionHandler
        Thread.UncaughtExceptionHandler handler = (thread, throwable) -> {
            // Might come from another thread
            if (throwable.getCause() != null && throwable.getCause().getCause() != null &&
                    throwable.getCause().getCause() instanceof BlockStoreException) {
                // Wallet block-store problems are logged but not surfaced as a popup here.
                log.error(throwable.getMessage());
            } else {
                log.error("Uncaught Exception from thread " + Thread.currentThread().getName());
                log.error("Uncaught Exception throwableMessage= " + throwable.getMessage());
                throwable.printStackTrace();
                // Popup must be shown on the UI thread.
                UserThread.execute(() -> showErrorPopup(throwable, false));
            }
        };
        Thread.setDefaultUncaughtExceptionHandler(handler);
        Thread.currentThread().setUncaughtExceptionHandler(handler);
        Security.addProvider(new BouncyCastleProvider());
        BitsquareApp.primaryStage = primaryStage;
        shutDownHandler = this::stop;
        try {
            // Guice
            bitsquareAppModule = new BitsquareAppModule(env, primaryStage);
            injector = Guice.createInjector(bitsquareAppModule);
            injector.getInstance(InjectorViewFactory.class).setInjector(injector);
            Version.setBtcNetworkId(injector.getInstance(BitsquareEnvironment.class).getBitcoinNetwork().ordinal());
            // Corruption callback may fire before mainView exists; the list is
            // handed to the view again right after loading, below.
            Storage.setDatabaseCorruptionHandler((String fileName) -> {
                corruptedDatabaseFiles.add(fileName);
                if (mainView != null)
                    mainView.setPersistedFilesCorrupted(corruptedDatabaseFiles);
            });
            // load the main view and create the main scene
            CachingViewLoader viewLoader = injector.getInstance(CachingViewLoader.class);
            mainView = (MainView) viewLoader.load(MainView.class);
            mainView.setPersistedFilesCorrupted(corruptedDatabaseFiles);
            /* Storage.setDatabaseCorruptionHandler((String fileName) -> {
                corruptedDatabaseFiles.add(fileName);
                if (mainView != null)
                    mainView.setPersistedFilesCorrupted(corruptedDatabaseFiles);
            });*/
            scene = new Scene(mainView.getRoot(), 1100, 740);
            scene.getStylesheets().setAll(
                    "/io/bitsquare/gui/bitsquare.css",
                    "/io/bitsquare/gui/images.css");
            // configure the system tray
            SystemTray systemTray = SystemTray.create(primaryStage, shutDownHandler);
            // Intercept window close so we can shut down gracefully instead.
            primaryStage.setOnCloseRequest(event -> {
                event.consume();
                stop();
            });
            // Keyboard shortcuts: Cmd/Ctrl+W and +Q quit; +E empties the wallet,
            // +M sends an alert message, +F opens the FPS window; +D opens the
            // debug window in dev mode only.
            scene.addEventHandler(KeyEvent.KEY_RELEASED, keyEvent -> {
                if (new KeyCodeCombination(KeyCode.W, KeyCombination.SHORTCUT_DOWN).match(keyEvent)) {
                    stop();
                } else if (new KeyCodeCombination(KeyCode.Q, KeyCombination.SHORTCUT_DOWN).match(keyEvent)) {
                    stop();
                } else if (new KeyCodeCombination(KeyCode.E, KeyCombination.SHORTCUT_DOWN).match(keyEvent)) {
                    showEmptyWalletPopup();
                } else if (new KeyCodeCombination(KeyCode.M, KeyCombination.SHORTCUT_DOWN).match(keyEvent)) {
                    showSendAlertMessagePopup();
                } else if (new KeyCodeCombination(KeyCode.F, KeyCombination.SHORTCUT_DOWN).match(keyEvent))
                    showFPSWindow();
                else if (BitsquareApp.DEV_MODE) {
                    if (new KeyCodeCombination(KeyCode.D, KeyCombination.SHORTCUT_DOWN).match(keyEvent))
                        showDebugWindow();
                }
            });
            // configure the primary stage
            primaryStage.setTitle(env.getRequiredProperty(APP_NAME_KEY));
            primaryStage.setScene(scene);
            primaryStage.setMinWidth(1040);
            primaryStage.setMinHeight(620);
            // on windows the title icon is also used as task bar icon in a larger size
            // on Linux no title icon is supported but also a large task bar icon is derived form that title icon
            String iconPath;
            if (Utilities.isOSX())
                iconPath = ImageUtil.isRetina() ? "/images/window_icon@2x.png" : "/images/window_icon.png";
            else if (Utilities.isWindows())
                iconPath = "/images/task_bar_icon_windows.png";
            else
                iconPath = "/images/task_bar_icon_linux.png";
            primaryStage.getIcons().add(new Image(getClass().getResourceAsStream(iconPath)));
            // make the UI visible
            primaryStage.show();
            //showDebugWindow();
        } catch (Throwable throwable) {
            showErrorPopup(throwable, false);
        }
    }

    /** Opens the admin window for broadcasting/removing alert messages (key-protected). */
    private void showSendAlertMessagePopup() {
        AlertManager alertManager = injector.getInstance(AlertManager.class);
        new SendAlertMessageWindow()
                .onAddAlertMessage((alert, privKeyString) -> alertManager.addAlertMessageIfKeyIsValid(alert, privKeyString))
                .onRemoveAlertMessage(privKeyString -> alertManager.removeAlertMessageIfKeyIsValid(privKeyString))
                .show();
    }

    /** Opens the emergency "empty wallet" window. */
    private void showEmptyWalletPopup() {
        injector.getInstance(EmptyWalletWindow.class).show();
    }

    /**
     * Shows an error popup; falls back to a minimal scene when called before the
     * main scene exists, and to a controlsfx dialog if popup display itself fails.
     *
     * @param doShutDown when true the app is stopped after the popup is handled
     */
    private void showErrorPopup(Throwable throwable, boolean doShutDown) {
        // May be called from the uncaught-exception handler before start() built the scene.
        if (scene == null) {
            scene = new Scene(new StackPane(), 1000, 650);
            primaryStage.setScene(scene);
            primaryStage.show();
        }
        try {
            throwable.printStackTrace();
            try {
                // popupOpened prevents a cascade of popups from repeated errors.
                if (!popupOpened) {
                    String message = throwable.getMessage();
                    popupOpened = true;
                    if (message != null)
                        new Popup().error(message).onClose(() -> popupOpened = false).show();
                    else
                        new Popup().error(throwable.toString()).onClose(() -> popupOpened = false).show();
                }
            } catch (Throwable throwable3) {
                log.error("Error at displaying Throwable.");
                throwable3.printStackTrace();
            }
            if (doShutDown)
                stop();
        } catch (Throwable throwable2) {
            // If printStackTrace cause a further exception we don't pass the throwable to the Popup.
            Dialogs.create()
                    .owner(primaryStage)
                    .title("Error")
                    .message(throwable.toString())
                    .masthead("A fatal exception occurred at startup.")
                    .showError();
            if (doShutDown)
                stop();
        }
    }

    // Used for debugging trade process
    private void showDebugWindow() {
        ViewLoader viewLoader = injector.getInstance(ViewLoader.class);
        View debugView = viewLoader.load(DebugView.class);
        Parent parent = (Parent) debugView.getRoot();
        Stage stage = new Stage();
        stage.setScene(new Scene(parent));
        stage.setTitle("Debug window");
        stage.initModality(Modality.NONE);
        stage.initStyle(StageStyle.UTILITY);
        stage.initOwner(scene.getWindow());
        // Place the utility window just right of the main window.
        stage.setX(primaryStage.getX() + primaryStage.getWidth() + 10);
        stage.setY(primaryStage.getY());
        stage.show();
    }

    /** Small always-updating window showing the UI frame rate (for diagnostics). */
    private void showFPSWindow() {
        Label label = new Label();
        // Average FPS over the last 100 animation ticks (timestamps in nanoseconds).
        EventStreams.animationTicks()
                .latestN(100)
                .map(ticks -> {
                    int n = ticks.size() - 1;
                    return n * 1_000_000_000.0 / (ticks.get(n) - ticks.get(0));
                })
                .map(d -> String.format("FPS: %.3f", d))
                .feedTo(label.textProperty());
        Pane root = new StackPane();
        root.getChildren().add(label);
        Stage stage = new Stage();
        stage.setScene(new Scene(root));
        stage.setTitle("FPS");
        stage.initModality(Modality.NONE);
        stage.initStyle(StageStyle.UTILITY);
        stage.initOwner(scene.getWindow());
        stage.setX(primaryStage.getX() + primaryStage.getWidth() + 10);
        stage.setY(primaryStage.getY());
        stage.setWidth(200);
        stage.setHeight(100);
        stage.show();
    }

    /** JavaFX stop hook: performs a graceful shutdown, then exits the JVM. */
    @Override
    public void stop() {
        gracefulShutDown(() -> {
            log.info("App shutdown complete");
            System.exit(0);
        });
    }

    /**
     * Shuts down managers/services in dependency order (arbitrators, view model,
     * open offers, P2P, wallet), then closes the module and reports completion.
     */
    private void gracefulShutDown(ResultHandler resultHandler) {
        log.debug("gracefulShutDown");
        new Popup().headLine("Shut down in progress")
                .backgroundInfo("Shutting down application can take a few seconds.\n" +
                        "Please don't interrupt that process.").closeButtonText("Ok")
                .show();
        try {
            if (injector != null) {
                injector.getInstance(ArbitratorManager.class).shutDown();
                injector.getInstance(MainViewModel.class).shutDown();
                injector.getInstance(OpenOfferManager.class).shutDown(() -> {
                    injector.getInstance(P2PService.class).shutDown(() -> {
                        injector.getInstance(WalletService.class).shutDownDone.addListener((ov, o, n) -> {
                            bitsquareAppModule.close(injector);
                            log.info("Graceful shutdown completed");
                            resultHandler.handleResult();
                        });
                        injector.getInstance(WalletService.class).shutDown();
                    });
                });
                // we wait max 5 sec.
                // NOTE(review): this failsafe timer can invoke resultHandler a
                // second time after the normal path above completes - confirm
                // handleResult is safe to call twice (here it calls System.exit).
                UserThread.runAfter(resultHandler::handleResult, 5);
            } else {
                UserThread.runAfter(resultHandler::handleResult, 1);
            }
        } catch (Throwable t) {
            log.info("App shutdown failed with exception");
            t.printStackTrace();
            System.exit(1);
        }
    }
}
// $Id: Grib1Data.java,v 1.11 2005/12/13 22:58:55 rkambic Exp $
package ucar.grib.grib1;
import ucar.grib.*;
import ucar.unidata.io.RandomAccessFile;
/*
* Grib1Data.java 1.0 10/12/2004
*
* @author Robb Kambic
*
*/
import java.io.IOException;
/**
* A class used to extract data from a GRIB1 file.
* see <a href="../../../IndexFormat.txt"> IndexFormat.txt</a>
*/
public final class Grib1Data {

    private static org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(Grib1Data.class);

    /*
     * should Thin Grids be expanded? Default is true.
     * Mutable process-wide via setExpandGrib1ThinGrids().
     */
    static private boolean expandGrib1ThinGrids = true;

    /*
     * used to hold open file descriptor
     */
    private final RandomAccessFile raf;

    /**
     * Constructs a Grib1Data object from a stream.
     *
     * @param raf ucar.unidata.io.RandomAccessFile with GRIB content.
     */
    public Grib1Data(RandomAccessFile raf) {
        this.raf = raf;
    }

    /**
     * Reads the Grib data.
     *
     * @param gdsOffset GDS offset into file, or -1 when no GDS offset is known.
     * @param dataOffset offset of the data (BMS/BDS) part into file.
     * @param decimalScale decimal scale factor used when unpacking values.
     * @param bmsExists true when a Bit Map Section precedes the data section.
     * @return float[] of unpacked values, or null if the packing is unsupported.
     * @throws IOException on read failure
     */
    public final float[] getData(long gdsOffset, long dataOffset, int decimalScale, boolean bmsExists)
            throws IOException {
        //long start = System.currentTimeMillis();
        Grib1GridDefinitionSection gds = null;
        Grib1GDSVariables gdsv = null;
        boolean isThin = false;
        try {
            // check for thin grids
            if ( gdsOffset != -1 ) {
                raf.seek(gdsOffset);
                gds = new Grib1GridDefinitionSection(raf);
                gdsv = gds.getGdsVars();
                // Thin (quasi-regular) grid: PV/PL present but no vertical
                // coordinate parameters (NV of 0 or 255).
                int PVorPL = gdsv.getPVorPL();
                int NV = gdsv.getNV();
                isThin = false;
                if (PVorPL != 255 && (NV == 0 || NV == 255) ) {
                    isThin = true;
                }
            }
        } catch (NoValidGribException nvge) {
            log.debug("gds exception was caught");
        }
        // NOTE(review): when gdsOffset == -1 or the GDS failed to parse, gdsv
        // is still null here and the gdsv.getScanMode()/getNx()/getNy() calls
        // below throw NullPointerException - confirm callers always pass a
        // valid GDS offset.
        // seek data start
        raf.seek(dataOffset);
        // Need section 3 and 4 to read/interpret the data
        Grib1BitMapSection bms = null;
        if (bmsExists) {
            // read Bit Mapped Section 3
            bms = new Grib1BitMapSection(raf);
        }
        try {
            // read Binary Data Section 4
            // Grib1BinaryDataSection bds =
            //     new Grib1BinaryDataSection(raf, decimalScale, bms, gdsv.getScanMode(), gdsv.getNx(), gdsv.getNy() );
            // if (isThin && expandGrib1ThinGrids) {
            //    QuasiRegular qr = new QuasiRegular(bds.getValues(), gdsv.getParallels(), gdsv.getNx(), gdsv.getNy() );
            //    return qr.getData();
            // } else {
            //    return bds.getValues();
            if ( !isThin ) { // 99% path
                Grib1BinaryDataSection bds =
                        new Grib1BinaryDataSection(raf, decimalScale, bms, gdsv.getScanMode(), gdsv.getNx(), gdsv.getNy() );
                return bds.getValues();
            }
            // Process thin grids: Nx is variable per row, so -1 is passed for Nx.
            Grib1BinaryDataSection bds =
                    new Grib1BinaryDataSection(raf, decimalScale, bms, gdsv.getScanMode(), -1, gdsv.getNy() );
            if (expandGrib1ThinGrids) {
                QuasiRegular qr = new QuasiRegular(bds.getValues(), gdsv.getParallels(), gdsv.getNx(), gdsv.getNy() );
                return qr.getData();
            } else { // return unexpanded values, does not work in CDM stack code
                return bds.getValues();
            }
        } catch (NotSupportedException notSupport) {
            log.error("Grib1BinaryDataSection exception was caught");
            return null;
        }
    } // end getData

    /**
     * This code should not be used any more, old code left for old indexes &lt;8.
     *
     * Reads the Grib data. The offset may point at either the GDS or the data
     * section; the heuristics below sniff which one it is and position the
     * stream at the data section either way.
     *
     * @param offset offset into file (GDS or data section, see above).
     * @param decimalScale decimal scale factor used when unpacking values.
     * @param bmsExists true when a Bit Map Section precedes the data section.
     * @return float[] of unpacked values, or null if the packing is unsupported.
     * @throws NotSupportedException
     * @throws IOException
     */
    public final float[] getData(long offset, int decimalScale, boolean bmsExists)
            throws IOException {
        //long start = System.currentTimeMillis();
        // check if the offset is for the GDS or Data. The new indexer code
        // will make all the offsets GDS
        boolean isThin = false;
        Grib1GridDefinitionSection gds = null;
        Grib1GDSVariables gdsv = null;
        try {
            raf.seek(offset);
            //System.out.print( "seek took "+ (System.currentTimeMillis() - start) );
            //System.out.println( "raf.getFilePointer()="+ raf.getFilePointer() );
            // Read/Check if this is section 2 GDS
            // octets 1-3 (Length of GDS)
            int length = GribNumbers.uint3(raf);
            //System.out.println( "GDS length = " + length );
            // octets 4 NV
            int NV = raf.read();
            //System.out.println( "GDS NV = " + NV );
            // octet 5 PL is this a Quasi/Thin Grid no == 255
            int P_VorL = raf.read();
            //System.out.println( "GDS PL = " + P_VorL );
            if (length < 50) {
                // Small section: skip over it; not a grid we need to parse here.
                raf.skipBytes(length - 5);
                // Quasi/Thin grid
            } else if (length < 1200 && !bmsExists) {
                if (P_VorL != 255) {
                    raf.skipBytes(-5); //reset raf to start of GDS
                    gds = new Grib1GridDefinitionSection(raf);
                    gdsv = gds.getGdsVars();
                    isThin = gdsv.isThin();
                    //System.out.println( "GDS isThin = " + isThin );
                    // sigma vertical coordinates
                } else { // NV != 0 && NV != 255
                    raf.skipBytes(length - 5);
                    //System.out.println( "GDS sigma vertical coordinates" );
                }
                // non standard sigma vertical coordinates
            } else if (length == ((NV * 4) + 32)) {
                raf.skipBytes(length - 5);
                // tighter critera if bmsExist could be an error
            } else if (length < 600) {
                if (P_VorL != 255) {
                    raf.skipBytes(-5); //reset raf to start of GDS
                    gds = new Grib1GridDefinitionSection(raf);
                    gdsv = gds.getGdsVars();
                    isThin = gdsv.isThin();
                    //System.out.println( "GDS isThin = " + isThin );
                    // sigma vertical coordinates
                } else { // NV != 0 && NV != 255
                    raf.skipBytes(length - 5);
                    //System.out.println( "GDS sigma vertical coordinates" );
                }
            } else {
                // Not a GDS after all: the offset already points at the data section.
                raf.seek(offset);
            }
        } catch (NoValidGribException nvge) {
            log.error("gds exception was caught");
            raf.seek(offset);
        }
        // NOTE(review): on several branches above gdsv remains null, yet the
        // non-thin path below dereferences gdsv.getScanMode() - potential
        // NullPointerException; confirm which index versions reach this path.
        // Need section 3 and 4 to read/interpet the data, section 5
        // as a check that all data read and sections are correct
        Grib1BitMapSection bms = null;
        if (bmsExists) {
            // read Bit Mapped Section 3
            bms = new Grib1BitMapSection(raf);
        }
        try {
            // read Binary Data Section 4
            if ( !isThin ) { // 99% path
                Grib1BinaryDataSection bds =
                        new Grib1BinaryDataSection(raf, decimalScale, bms, gdsv.getScanMode(), gdsv.getNx(), gdsv.getNy() );
                return bds.getValues();
            }
            // Process thin grids: Nx is variable per row, so -1 is passed for Nx.
            Grib1BinaryDataSection bds =
                    new Grib1BinaryDataSection(raf, decimalScale, bms, gdsv.getScanMode(), -1, gdsv.getNy() );
            if (expandGrib1ThinGrids) {
                QuasiRegular qr = new QuasiRegular(bds.getValues(), gdsv.getParallels(), gdsv.getNx(), gdsv.getNy() );
                return qr.getData();
            } else { // return unexpanded values, does not work in CDM stack code
                return bds.getValues();
            }
        } catch (NotSupportedException notSupport) {
            log.error("Grib1BinaryDataSection exception was caught");
            return null;
        }
    } // end getData

    /*
     * Lets client control Thin grid expansion (process-wide setting).
     */
    public static void setExpandGrib1ThinGrids( boolean b ) {
        expandGrib1ThinGrids = b;
    }
} // end Grib1Data
package io.redspark.ireadme.entity;
import java.util.ArrayList;
import java.util.Collection;
import javax.persistence.CollectionTable;
import javax.persistence.Column;
import javax.persistence.ElementCollection;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.ManyToMany;
import javax.persistence.Table;
import org.hibernate.envers.Audited;
import org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder;
@Entity
@Audited
@Table(name = "user")
public class User extends AbstractEntity {

    // Role name constants stored in the roles collection.
    public static final String ROLE_USER = "ROLE_USER";
    public static final String ROLE_ADMIN = "ROLE_ADMIN";

    @Column(name = "email")
    private String email;

    // Stores the BCrypt hash when created via the two-arg constructor; see
    // the NOTE on setPassword below.
    @Column(name = "password")
    private String password;

    @Column(name = "nickname")
    private String nickname;

    // Eagerly fetched role names, persisted in the "roles" collection table.
    @CollectionTable(name = "roles")
    @ElementCollection(fetch = FetchType.EAGER)
    private Collection<String> roles = new ArrayList<>();

    // Inverse side of the Team <-> User many-to-many (owned by Team.users).
    @ManyToMany(mappedBy = "users")
    private Collection<Team> teams = new ArrayList<>();

    /** Default constructor required by JPA. */
    public User() {
        super();
    }

    /**
     * Creates a user with the given email; the raw password is BCrypt-hashed
     * before being stored, and ROLE_USER is granted by default.
     */
    public User(String email, String password) {
        super();
        this.email = email;
        this.password = new BCryptPasswordEncoder().encode(password);
        this.roles.add(ROLE_USER);
    }

    public String getEmail() {
        return email;
    }

    public void setEmail(String email) {
        this.email = email;
    }

    public String getPassword() {
        return password;
    }

    // NOTE(review): unlike the constructor, this stores the value as given
    // (no BCrypt encoding) - confirm callers pass an already-encoded hash.
    public void setPassword(String password) {
        this.password = password;
    }

    public void setNickname(String nickname) {
        this.nickname = nickname;
    }

    public String getNickname() {
        return nickname;
    }

    public Collection<String> getRoles() {
        return roles;
    }

    public void setRoles(Collection<String> roles) {
        this.roles = roles;
    }

    public Collection<Team> getTeams() {
        return teams;
    }
}
package org.tuckey.web.filters.urlrewrite;
import org.tuckey.web.filters.urlrewrite.gzip.GzipFilter;
import org.tuckey.web.filters.urlrewrite.utils.Log;
import org.tuckey.web.filters.urlrewrite.utils.ModRewriteConfLoader;
import org.tuckey.web.filters.urlrewrite.utils.StringUtils;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NamedNodeMap;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.w3c.dom.Text;
import org.xml.sax.SAXParseException;
import javax.servlet.ServletContext;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.net.URL;
import java.net.URLDecoder;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
public class Conf {

    private static Log log = Log.getLog(Conf.class);

    // Errors collected while loading/initialising the conf (shown on the status page).
    private final List errors = new ArrayList();
    // Inbound rewrite rules in document order.
    private final List rules = new ArrayList(50);
    // <catch> elements for exception handling during rule execution.
    private final List catchElems = new ArrayList(10);
    // Outbound (link-encoding) rules in document order.
    private List outboundRules = new ArrayList(50);
    private boolean ok = false;
    private Date loadedDate = null;
    private int ruleIdCounter = 0;
    private int outboundRuleIdCounter = 0;
    // Display name and XML system id of the conf file being loaded.
    private String fileName;
    private String confSystemId;
    protected boolean useQueryString;
    protected boolean useContext;
    // Recognised values of the root element's "decode-using" attribute.
    private static final String NONE_DECODE_USING = "null";
    private static final String HEADER_DECODE_USING = "header";
    private static final String DEFAULT_DECODE_USING = "header,utf-8";
    protected String decodeUsing = DEFAULT_DECODE_USING;
    private boolean decodeUsingEncodingHeader;
    protected String defaultMatchType = null;
    private ServletContext context;
    // Set true once the conf document has been parsed without fatal errors.
    private boolean docProcessed = false;
    private boolean engineEnabled = true;
    /**
     * Empty const for testing etc.
     */
    public Conf() {
        // No configuration is loaded; only the load timestamp is recorded.
        loadedDate = new Date();
    }
    /**
     * Constructor for use only when loading XML style configuration.
     *
     * @param fileName to display on status screen
     */
    public Conf(ServletContext context, final InputStream inputStream, String fileName, String systemId) {
        // Delegates with modRewriteStyleConf = false, i.e. XML style conf.
        this(context, inputStream, fileName, systemId, false);
    }
    /**
     * Normal constructor.
     *
     * @param fileName to display on status screen
     * @param modRewriteStyleConf true if loading mod_rewrite style conf
     */
    public Conf(ServletContext context, final InputStream inputStream, String fileName, String systemId,
                boolean modRewriteStyleConf) {
        // make sure context is setup before calling initialise()
        this.context = context;
        this.fileName = fileName;
        this.confSystemId = systemId;
        // Parse either Apache mod_rewrite style text or the urlrewrite XML format.
        if (modRewriteStyleConf) {
            loadModRewriteStyle(inputStream);
        } else {
            loadDom(inputStream);
        }
        // Only initialise rules when the document was parsed without fatal errors.
        if (docProcessed) initialise();
        loadedDate = new Date();
    }
protected void loadModRewriteStyle(InputStream inputStream) {
ModRewriteConfLoader loader = new ModRewriteConfLoader();
try {
loader.process(inputStream, this);
docProcessed = true; // fixed
} catch (IOException e) {
addError("Exception loading conf " + " " + e.getMessage(), e);
}
}
    /**
     * Constructor when run elements don't need to be initialised correctly, for documentation etc.
     */
    public Conf(URL confUrl) {
        // make sure context is setup before calling initialise()
        this.context = null;
        this.fileName = confUrl.getFile();
        this.confSystemId = confUrl.toString();
        try {
            // Read the conf directly from the URL's stream.
            loadDom(confUrl.openStream());
        } catch (IOException e) {
            addError("Exception loading conf " + " " + e.getMessage(), e);
        }
        if (docProcessed) initialise();
        loadedDate = new Date();
    }
    /**
     * Constructor when run elements don't need to be initialised correctly, for documentation etc.
     */
    public Conf(InputStream inputStream, String conffile) {
        // No servlet context; the conf file name doubles as the XML system id.
        this(null, inputStream, conffile, conffile);
    }
    /**
     * Load the dom document from the inputstream
     * <p/>
     * Note, protected so that is can be extended.
     *
     * @param inputStream stream of the conf file to load
     */
    protected synchronized void loadDom(final InputStream inputStream) {
        if (inputStream == null) {
            log.error("inputstream is null");
            return;
        }
        DocumentBuilder parser;
        /**
         * the thing that resolves dtd's and other xml entities.
         */
        ConfHandler handler = new ConfHandler(confSystemId);
        // Validating, namespace-aware parse that drops comments and ignorable whitespace.
        DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
        log.debug("XML builder factory is: " + factory.getClass().getName());
        factory.setValidating(true);
        factory.setNamespaceAware(true);
        factory.setIgnoringComments(true);
        factory.setIgnoringElementContentWhitespace(true);
        // NOTE(review): external entity resolution is not disabled; the conf
        // file is normally a trusted deployment artifact, but consider XXE
        // hardening (secure processing / disallow-doctype-decl) if the stream
        // can ever come from an untrusted source.
        try {
            parser = factory.newDocumentBuilder();
        } catch (ParserConfigurationException e) {
            log.error("Unable to setup XML parser for reading conf", e);
            return;
        }
        log.debug("XML Parser: " + parser.getClass().getName());
        // The handler supplies the urlrewrite DTD locally and records parse errors.
        parser.setErrorHandler(handler);
        parser.setEntityResolver(handler);
        try {
            log.debug("about to parse conf");
            Document doc = parser.parse(inputStream, confSystemId);
            processConfDoc(doc);
        } catch (SAXParseException e) {
            addError("Parse error on line " + e.getLineNumber() + " " + e.getMessage(), e);
        } catch (Exception e) {
            addError("Exception loading conf " + " " + e.getMessage(), e);
        }
    }
    /**
     * Process dom document and populate Conf object.
     * <p/>
     * Note, protected so that is can be extended.
     */
    protected void processConfDoc(Document doc) {
        Element rootElement = doc.getDocumentElement();
        // Root-level attributes configure global engine behaviour.
        if ("true".equalsIgnoreCase(getAttrValue(rootElement, "use-query-string"))) setUseQueryString(true);
        if ("true".equalsIgnoreCase(getAttrValue(rootElement, "use-context"))) {
            log.debug("use-context set to true");
            setUseContext(true);
        }
        setDecodeUsing(getAttrValue(rootElement, "decode-using"));
        setDefaultMatchType(getAttrValue(rootElement, "default-match-type"));
        // Walk direct children of the root, dispatching on element name.
        NodeList rootElementList = rootElement.getChildNodes();
        for (int i = 0; i < rootElementList.getLength(); i++) {
            Node node = rootElementList.item(i);
            if (node.getNodeType() == Node.ELEMENT_NODE &&
                    ((Element) node).getTagName().equals("rule")) {
                Element ruleElement = (Element) node;
                // we have a rule node
                NormalRule rule = new NormalRule();
                processRuleBasics(ruleElement, rule);
                procesConditions(ruleElement, rule);
                processRuns(ruleElement, rule);
                // Only the first <to> child of a rule is honoured.
                Node toNode = ruleElement.getElementsByTagName("to").item(0);
                rule.setTo(getNodeValue(toNode));
                rule.setToType(getAttrValue(toNode, "type"));
                rule.setToContextStr(getAttrValue(toNode, "context"));
                rule.setToLast(getAttrValue(toNode, "last"));
                rule.setQueryStringAppend(getAttrValue(toNode, "qsappend"));
                if ("true".equalsIgnoreCase(getAttrValue(toNode, "encode"))) rule.setEncodeToUrl(true);
                processSetAttributes(ruleElement, rule);
                addRule(rule);
            } else if (node.getNodeType() == Node.ELEMENT_NODE &&
                    ((Element) node).getTagName().equals("class-rule")) {
                // A rule implemented by a user-supplied class/method.
                Element ruleElement = (Element) node;
                ClassRule classRule = new ClassRule();
                if ("false".equalsIgnoreCase(getAttrValue(ruleElement, "enabled"))) classRule.setEnabled(false);
                if ("false".equalsIgnoreCase(getAttrValue(ruleElement, "last"))) classRule.setLast(false);
                classRule.setClassStr(getAttrValue(ruleElement, "class"));
                classRule.setMethodStr(getAttrValue(ruleElement, "method"));
                addRule(classRule);
            } else if (node.getNodeType() == Node.ELEMENT_NODE &&
                    ((Element) node).getTagName().equals("outbound-rule")) {
                Element ruleElement = (Element) node;
                // we have a rule node
                OutboundRule rule = new OutboundRule();
                processRuleBasics(ruleElement, rule);
                if ("true".equalsIgnoreCase(getAttrValue(ruleElement, "encodefirst"))) rule.setEncodeFirst(true);
                procesConditions(ruleElement, rule);
                processRuns(ruleElement, rule);
                Node toNode = ruleElement.getElementsByTagName("to").item(0);
                rule.setTo(getNodeValue(toNode));
                rule.setToLast(getAttrValue(toNode, "last"));
                // Outbound rules encode by default; "encode=false" opts out.
                if ("false".equalsIgnoreCase(getAttrValue(toNode, "encode"))) rule.setEncodeToUrl(false);
                processSetAttributes(ruleElement, rule);
                addOutboundRule(rule);
            } else if (node.getNodeType() == Node.ELEMENT_NODE &&
                    ((Element) node).getTagName().equals("catch")) {
                Element catchXMLElement = (Element) node;
                // we have a rule node
                CatchElem catchElem = new CatchElem();
                catchElem.setClassStr(getAttrValue(catchXMLElement, "class"));
                processRuns(catchXMLElement, catchElem);
                catchElems.add(catchElem);
            }
        }
        // Mark the parse successful so the constructor runs initialise().
        docProcessed = true;
    }
private void processRuleBasics(Element ruleElement, RuleBase rule) {
if ("false".equalsIgnoreCase(getAttrValue(ruleElement, "enabled"))) rule.setEnabled(false);
String ruleMatchType = getAttrValue(ruleElement, "match-type");
if (StringUtils.isBlank(ruleMatchType)) ruleMatchType = defaultMatchType;
rule.setMatchType(ruleMatchType);
Node nameNode = ruleElement.getElementsByTagName("name").item(0);
rule.setName(getNodeValue(nameNode));
Node noteNode = ruleElement.getElementsByTagName("note").item(0);
rule.setNote(getNodeValue(noteNode));
Node fromNode = ruleElement.getElementsByTagName("from").item(0);
rule.setFrom(getNodeValue(fromNode));
if ("true".equalsIgnoreCase(getAttrValue(fromNode, "casesensitive"))) rule.setFromCaseSensitive(true);
}
private static void processSetAttributes(Element ruleElement, RuleBase rule) {
NodeList setNodes = ruleElement.getElementsByTagName("set");
for (int j = 0; j < setNodes.getLength(); j++) {
Node setNode = setNodes.item(j);
if (setNode == null) continue;
SetAttribute setAttribute = new SetAttribute();
setAttribute.setValue(getNodeValue(setNode));
setAttribute.setType(getAttrValue(setNode, "type"));
setAttribute.setName(getAttrValue(setNode, "name"));
rule.addSetAttribute(setAttribute);
}
}
private static void processRuns(Element ruleElement, Runnable runnable) {
NodeList runNodes = ruleElement.getElementsByTagName("run");
for (int j = 0; j < runNodes.getLength(); j++) {
Node runNode = runNodes.item(j);
if (runNode == null) continue;
Run run = new Run();
processInitParams(runNode, run);
run.setClassStr(getAttrValue(runNode, "class"));
run.setMethodStr(getAttrValue(runNode, "method"));
run.setJsonHandler("true".equalsIgnoreCase(getAttrValue(runNode, "jsonhandler")));
run.setNewEachTime("true".equalsIgnoreCase(getAttrValue(runNode, "neweachtime")));
runnable.addRun(run);
}
// gzip element is just a shortcut to run: org.tuckey.web.filters.urlrewrite.gzip.GzipFilter
NodeList gzipNodes = ruleElement.getElementsByTagName("gzip");
for (int j = 0; j < gzipNodes.getLength(); j++) {
Node runNode = gzipNodes.item(j);
if (runNode == null) continue;
Run run = new Run();
run.setClassStr(GzipFilter.class.getName());
run.setMethodStr("doFilter(ServletRequest, ServletResponse, FilterChain)");
processInitParams(runNode, run);
runnable.addRun(run);
}
}
private static void processInitParams(Node runNode, Run run) {
if (runNode.getNodeType() == Node.ELEMENT_NODE) {
Element runElement = (Element) runNode;
NodeList initParamsNodeList = runElement.getElementsByTagName("init-param");
for (int k = 0; k < initParamsNodeList.getLength(); k++) {
Node initParamNode = initParamsNodeList.item(k);
if (initParamNode == null) continue;
if (initParamNode.getNodeType() != Node.ELEMENT_NODE) continue;
Element initParamElement = (Element) initParamNode;
Node paramNameNode = initParamElement.getElementsByTagName("param-name").item(0);
Node paramValueNode = initParamElement.getElementsByTagName("param-value").item(0);
run.addInitParam(getNodeValue(paramNameNode), getNodeValue(paramValueNode));
}
}
}
private static void procesConditions(Element ruleElement, RuleBase rule) {
NodeList conditionNodes = ruleElement.getElementsByTagName("condition");
for (int j = 0; j < conditionNodes.getLength(); j++) {
Node conditionNode = conditionNodes.item(j);
if (conditionNode == null) continue;
Condition condition = new Condition();
condition.setValue(getNodeValue(conditionNode));
condition.setType(getAttrValue(conditionNode, "type"));
condition.setName(getAttrValue(conditionNode, "name"));
condition.setNext(getAttrValue(conditionNode, "next"));
condition.setCaseSensitive("true".equalsIgnoreCase(getAttrValue(conditionNode, "casesensitive")));
condition.setOperator(getAttrValue(conditionNode, "operator"));
rule.addCondition(condition);
}
}
private static String getNodeValue(Node node) {
if (node == null) return null;
NodeList nodeList = node.getChildNodes();
if (nodeList == null) return null;
Node child = nodeList.item(0);
if (child == null) return null;
if ((child.getNodeType() == Node.TEXT_NODE)) {
String value = ((Text) child).getData();
return value.trim();
}
return null;
}
private static String getAttrValue(Node n, String attrName) {
if (n == null) return null;
NamedNodeMap attrs = n.getAttributes();
if (attrs == null) return null;
Node attr = attrs.getNamedItem(attrName);
if (attr == null) return null;
String val = attr.getNodeValue();
if (val == null) return null;
return val.trim();
}
/**
* Initialise the conf file. This will run initialise on each rule and condition in the conf file.
*/
public void initialise() {
if (log.isDebugEnabled()) {
log.debug("now initialising conf");
}
initDecodeUsing(decodeUsing);
boolean rulesOk = true;
for (int i = 0; i < rules.size(); i++) {
final Rule rule = (Rule) rules.get(i);
if (!rule.initialise(context)) {
// if we failed to initialise anything set the status to bad
rulesOk = false;
}
}
for (int i = 0; i < outboundRules.size(); i++) {
final OutboundRule outboundRule = (OutboundRule) outboundRules.get(i);
if (!outboundRule.initialise(context)) {
// if we failed to initialise anything set the status to bad
rulesOk = false;
}
}
for (int i = 0; i < catchElems.size(); i++) {
final CatchElem catchElem = (CatchElem) catchElems.get(i);
if (!catchElem.initialise(context)) {
// if we failed to initialise anything set the status to bad
rulesOk = false;
}
}
if (rulesOk) {
ok = true;
}
if (log.isDebugEnabled()) {
log.debug("conf status " + ok);
}
}
    // Resolves the configured 'decodeusing' setting into this.decodeUsing and
    // decodeUsingEncodingHeader. Accepted forms: 'header', 'header,<charset>',
    // the "none" marker, or a plain charset name. A charset name is validated
    // via URLDecoder before being accepted; an invalid one is recorded as an
    // error and decodeUsing is left unset. Ordering below is significant: the
    // header prefix is stripped before the none/charset checks run.
    private void initDecodeUsing(String decodeUsingSetting) {
        decodeUsingSetting = StringUtils.trimToNull(decodeUsingSetting);
        if (decodeUsingSetting == null) decodeUsingSetting = DEFAULT_DECODE_USING;
        if ( decodeUsingSetting.equalsIgnoreCase(HEADER_DECODE_USING)) { // is 'header'
            decodeUsingEncodingHeader = true;
            decodeUsingSetting = null;
        } else if ( decodeUsingSetting.startsWith(HEADER_DECODE_USING + ",")) { // is 'header,xxx'
            decodeUsingEncodingHeader = true;
            decodeUsingSetting = decodeUsingSetting.substring((HEADER_DECODE_USING + ",").length());
        }
        if (NONE_DECODE_USING.equalsIgnoreCase(decodeUsingSetting)) {
            decodeUsingSetting = null;
        }
        if ( decodeUsingSetting != null ) {
            try {
                // probe-decode a dummy value purely to validate the charset name
                URLDecoder.decode("testUrl", decodeUsingSetting);
                this.decodeUsing = decodeUsingSetting;
            } catch (UnsupportedEncodingException e) {
                addError("unsupported 'decodeusing' " + decodeUsingSetting + " see Java SDK docs for supported encodings");
            }
        } else {
            this.decodeUsing = null;
        }
    }
/**
* Destory the conf gracefully.
*/
public void destroy() {
for (int i = 0; i < rules.size(); i++) {
final Rule rule = (Rule) rules.get(i);
rule.destroy();
}
}
    /**
     * Will add the rule to the rules list, assigning it the next sequential
     * rule id.
     *
     * @param rule The Rule to add
     */
    public void addRule(final Rule rule) {
        rule.setId(ruleIdCounter++);
        rules.add(rule);
    }
    /**
     * Will add the outbound rule to the outbound rules list, assigning it the
     * next sequential outbound-rule id.
     *
     * @param outboundRule The outbound rule to add
     */
    public void addOutboundRule(final OutboundRule outboundRule) {
        outboundRule.setId(outboundRuleIdCounter++);
        outboundRules.add(outboundRule);
    }
    /**
     * Will get the List of errors.
     *
     * @return the List of errors (the live internal list — treat as read-only)
     */
    public List getErrors() {
        return errors;
    }
    /**
     * Will get the List of rules.
     *
     * @return the List of rules (the live internal list — treat as read-only)
     */
    public List getRules() {
        return rules;
    }
    /**
     * Will get the List of outbound rules.
     *
     * @return the List of outbound rules (the live internal list — treat as read-only)
     */
    public List getOutboundRules() {
        return outboundRules;
    }
    /**
     * true if the conf has been loaded ok.
     *
     * @return boolean
     */
    public boolean isOk() {
        return ok;
    }
    // Records an error message and logs it together with the causing exception.
    private void addError(final String errorMsg, final Exception e) {
        errors.add(errorMsg);
        log.error(errorMsg, e);
    }
    // Records an error message only (no logging).
    private void addError(final String errorMsg) {
        errors.add(errorMsg);
    }
    // Returns a defensive copy so callers cannot mutate the load timestamp.
    public Date getLoadedDate() {
        return (Date) loadedDate.clone();
    }
    // Name of the conf file this configuration was loaded from (null when not
    // loaded from a file; see isLoadedFromFile()).
    public String getFileName() {
        return fileName;
    }
    public boolean isUseQueryString() {
        return useQueryString;
    }
    public void setUseQueryString(boolean useQueryString) {
        this.useQueryString = useQueryString;
    }
    public boolean isUseContext() {
        return useContext;
    }
    public void setUseContext(boolean useContext) {
        this.useContext = useContext;
    }
    // Charset name used to decode URLs, or null when no custom decoding applies.
    public String getDecodeUsing() {
        return decodeUsing;
    }
    public void setDecodeUsing(String decodeUsing) {
        this.decodeUsing = decodeUsing;
    }
    // Accepts only 'wildcard' (case-insensitive); anything else falls back to
    // the library default match type.
    public void setDefaultMatchType(String defaultMatchType) {
        if (RuleBase.MATCH_TYPE_WILDCARD.equalsIgnoreCase(defaultMatchType)) {
            this.defaultMatchType = RuleBase.MATCH_TYPE_WILDCARD;
        } else {
            this.defaultMatchType = RuleBase.DEFAULT_MATCH_TYPE;
        }
    }
    public String getDefaultMatchType() {
        return defaultMatchType;
    }
    public List getCatchElems() {
        return catchElems;
    }
    // True when a custom charset (set via initDecodeUsing) must be used to decode.
    public boolean isDecodeUsingCustomCharsetRequired() {
        return decodeUsing != null;
    }
    public boolean isEngineEnabled() {
        return engineEnabled;
    }
    public void setEngineEnabled(boolean engineEnabled) {
        this.engineEnabled = engineEnabled;
    }
    public boolean isLoadedFromFile() {
        return fileName != null;
    }
    // True when 'decodeusing' was configured as 'header' or 'header,<charset>'.
    public boolean isDecodeUsingEncodingHeader() {
        return decodeUsingEncodingHeader;
    }
} |
package com.matthewtamlin.sliding_intro_screen_library.buttons;
import android.graphics.Typeface;
import android.graphics.drawable.Drawable;
import android.view.View;
/**
* Provides indirect access to an IntroButton, to allow limited modification and inspection. Each
* accessor provides access to a single IntroButton only. This class is immutable but not
* necessarily thread-safe, as its methods interact with non-thread safe objects.
* <p>
* See {@link IntroButton}.
*/
public final class IntroButtonAccessor {
/**
* The IntroButton this accessor provides access to.
*/
private final IntroButton button;
/**
* Constructs a new IntroButtonAccessor instance.
*
* @param button
* the IntroButton to provide access to
*/
public IntroButtonAccessor(final IntroButton button) {
this.button = button;
}
public final void setBehaviour(final IntroButton.Behaviour behaviour) {
button.setBehaviour(behaviour);
}
/**
* @return the current Behaviour of the accessed IntroButton, not null
*/
public final IntroButton.Behaviour getBehaviour() {
return button.getBehaviour();
}
public final void setAppearance(final IntroButton.Appearance appearance) {
button.setAppearance(appearance);
}
/**
* @return the current Appearance of the accessed IntroButton, not null
*/
public final IntroButton.Appearance getAppearance() {
return button.getAppearance();
}
/**
* Sets the text to be displayed by the accessed IntroButton. The text will only be displayed
* when its Appearance is set to {@code Appearance.TEXT_ONLY}, {@code Appearance.ICON_TEXT_LEFT}
* or {@code Appearance.ICON_TEXT_RIGHT}. The text is linked to a Behaviour class, and will only
* be used when the button is using an instance of that Behaviour class.
*
* @param text
* the text to display
* @param behaviourClass
* the Behaviour class to associate the text with, null to use the current Behaviour
*/
public final void setText(final CharSequence text, final Class<? extends IntroButton.Behaviour>
behaviourClass) {
button.setLabel(text, behaviourClass);
}
/**
* Returns the text displayed by the accessed IntroButton for a particular Behaviour class. Note
* that the text may not currently be visible.
*
* @param behaviourClass
* the Behaviour class to get the associated text of, null to use the the current Behaviour
* @return the text for the Behaviour class, null if there is none
*/
public final CharSequence getText(final Class<? extends IntroButton.Behaviour> behaviourClass) {
return button.getLabel(behaviourClass);
}
/**
* Sets the icon to be displayed by the accessed IntroButton. The icon will only be displayed
* when its Appearance is set to {@code Appearance.ICON_ONLY}, {@code Appearance.ICON_TEXT_LEFT}
* or {@code Appearance.ICON_TEXT_RIGHT}. The icon is linked to a Behaviour class, and will only
* be used when the IntroButton is using an instance of that Behaviour class.
*
* @param icon
* the icon to display
* @param behaviourClass
* the Behaviour class to associate the icon with, null to use the current Behaviour
*/
public final void setIcon(final Drawable icon, final Class<? extends IntroButton.Behaviour>
behaviourClass) {
button.setIcon(icon, behaviourClass);
}
/**
* Returns the icon displayed by this IntroButton for a particular Behaviour class. Note that
* the icon may not currently be visible.
*
* @param behaviourClass
* the Behaviour class to get the associated icon of, null to use the the current Behaviour
* @return the icon for the Behaviour class, null if there is none
*/
public final Drawable getIcon(final Class<? extends IntroButton.Behaviour> behaviourClass) {
return button.getIcon(behaviourClass);
}
/**
* Sets the text color of the accessed IntroButton.
*
* @param color
* the text color, as an ARGB hex code
*/
public final void setTextColor(final int color) {
button.setTextColor(color);
}
/**
* @return the text color of the accessed button, as an ARGB hex code
*/
public final int getTextColor() {
return button.getCurrentTextColor();
}
/**
* Sets the typeface and style of the accessed IntroButton.
*
* @param tf
* the typeface to use
* @param style
* the style to use
*/
public final void setTypeface(final Typeface tf, final int style) {
button.setTypeface(tf, style);
}
/**
* Sets the typeface of the accessed IntroButton.
*
* @param tf
* the typeface to use
*/
public final void setTypeface(final Typeface tf) {
button.setTypeface(tf);
}
/**
* Sets the size of the text displayed in the accessed IntroButton.
*
* @param textSizeSp
* the size to use, measured in scaled-pixels
*/
public final void setTextSize(final float textSizeSp) {
button.setTextSize(textSizeSp);
}
/**
* @return the size of the text currently displayed in the accessed IntroButton, measured in
* pixels
*/
public final float getTextSize() {
return button.getTextSize();
}
/**
* Sets the on-click listener for the accessed IntroButton. This functionality is independent of
* the Behaviour.
*
* @param l
* the listener to receive the callbacks, null to clear any listener
*/
public final void setOnClickListener(final View.OnClickListener l) {
button.setOnClickListener(l);
}
} |
package water.parser;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Test;
import water.Key;
import water.TestUtil;
import water.fvec.Frame;
import water.fvec.Vec;
import water.util.PrettyPrint;
import water.util.StringUtils;
import java.util.Random;
import java.util.UUID;
import static water.parser.DefaultParserProviders.CSV_INFO;
/**
 * Parser regression tests covering NA handling, single/double quoting, and
 * very sparse data (exercises the sparse chunk-compression paths).
 */
public class ParserTest2 extends TestUtil {
  @BeforeClass
  public static void setup() {
    // tests require a running (single-node) cloud
    stall_till_cloudsize(1);
  }
  // Separators exercised by the sparse-data tests below.
  private final char[] SEPARATORS = new char[] {',', ' '};
  // Asserts the parsed frame matches `expected` cell-for-cell: null means NA,
  // other entries are compared against the categorical domain value. Deletes
  // the frame afterwards.
  private static void testParsed(Frame fr, String[][] expected) {
    Assert.assertEquals(expected .length,fr.numRows());
    Assert.assertEquals(expected[0].length,fr.numCols());
    for( int j = 0; j < fr.numCols(); ++j ) {
      Vec vec = fr.vecs()[j];
      for( int i = 0; i < expected.length; ++i ) {
        if( expected[i][j]==null )
          Assert.assertTrue(i+" -- "+j, vec.isNA(i));
        else {
          String pval = vec.domain()[(int)vec.at8(i)];
          Assert.assertTrue(expected[i][j]+" -- "+pval,expected[i][j].equals(pval));
        }
      }
    }
    fr.delete();
  }
  // Parses a dataset whose columns hit many chunk encodings; the last two rows
  // (all blank, then all '?') must come back as NAs in every column.
  @Test public void testNAs() {
    String [] data = new String[]{
        "'C1Chunk',C1SChunk, 'C2Chunk', 'C2SChunk',  'C4Chunk', 'C4FChunk', 'C8Chunk', 'C8DChunk',   'Categorical'\n" +
            "0,        0.0,       0,         0,           0,         0 ,         0,         8.878979,     A \n" ,
        "1,        0.1,       1,         0.1,         1,         1 ,         1,         1.985934,     B \n" ,
        "2,        0.2,       2,         0.2,         2,         2 ,         2,         3.398018,     C \n" ,
        "3,        0.3,       3,         0.3,         3,         3 ,         3,         9.329589,     D \n" ,
        "4,        0.4,       4,         4,           4,         4 ,         2147483649,0.290184,     A \n" ,
        "0,        0.5,       0,         0,           -100000,   1.234e2 ,   -2147483650,1e-30,        B \n" ,
        "254,      0.25,      2550,      6553.4,      100000,    2.345e-2,   0,         1e30,         C \n" ,
        " ,        ,          ,          ,            ,          ,           ,          ,              \n" ,
        "?,        NA,        ?,         ?,           ?,         ?,          ?,         ?,             \n" ,
    };
    Key rkey = ParserTest.makeByteVec(data);
    ParseSetup ps = new ParseSetup(CSV_INFO, (byte)',', false, ParseSetup.HAS_HEADER, 9,
        new String[]{"'C1Chunk'","C1SChunk", "'C2Chunk'", "'C2SChunk'",  "'C4Chunk'", "'C4FChunk'", "'C8Chunk'", "'C8DChunk'",   "'Categorical'"},
        ParseSetup.strToColumnTypes(new String[]{"Numeric", "Numeric", "Numeric", "Numeric", "Numeric", "Numeric", "Numeric", "Numeric", "Enum"}), null, null, null);
    Frame fr = ParseDataset.parse(Key.make("na_test.hex"), new Key[]{rkey}, true, ps);
    int nlines = (int)fr.numRows();
    Assert.assertEquals(9,nlines);
    Assert.assertEquals(9,fr.numCols());
    // all rows except the last two must be fully populated
    for(int i = 0; i < nlines-2; ++i)
      for( Vec v : fr.vecs() )
        Assert.assertTrue("error at line "+i+", vec " + v.chunkForChunkIdx(0).getClass().getSimpleName(),
            !Double.isNaN(v.at(i)) && !v.isNA(i) );
    // the last two rows must be NA in every column
    for( int j=0; j<fr.vecs().length; j++ ) {
      Vec v = fr.vecs()[j];
      for( int i = nlines-2; i < nlines; ++i )
        Assert.assertTrue(i + ", " + j + ":" + v.at(i) + ", " + v.isNA(i), Double.isNaN(v.at(i)) && v.isNA(i) );
    }
    fr.delete();
  }
  // Parses the same bytes with single-quote handling off and on; only the
  // quotes-off expectation is currently asserted.
  @Test public void testSingleQuotes(){
    String[] data  = new String[]{"'Tomass,test,first,line'\n'Tomas''s,test2',test2\nlast,'line''","s, trailing, piece'"};
    String[][] expectFalse = new String[][] { ar("'Tomass"  ,"test" ,"first","line'"),
        ar("'Tomas''s","test2'","test2",null),
        ar("last","'line''s","trailing","piece'") };
    Key k = ParserTest.makeByteVec(data);
    ParseSetup gSetupF = ParseSetup.guessSetup(null, StringUtils.bytesOf(data[0]), new ParseSetup(CSV_INFO, (byte)',', false/*single quote*/, 4, ParseSetup.NO_HEADER, null, null));
    gSetupF._column_types = ParseSetup.strToColumnTypes(new String[]{"Enum", "Enum", "Enum", "Enum"});
    Frame frF = ParseDataset.parse(Key.make(), new Key[]{k}, false, gSetupF);
    testParsed(frF,expectFalse);
    String[][] expectTrue = new String[][] { ar("Tomass,test,first,line", null),
        ar("Tomas''stest2","test2"),
        ar("last", "lines trailing piece") };
    ParseSetup gSetupT = ParseSetup.guessSetup(null, StringUtils.bytesOf(data[0]), new ParseSetup(CSV_INFO, (byte)',', true/*single quote*/, 2, ParseSetup.NO_HEADER, null, null));
    gSetupT._column_types = ParseSetup.strToColumnTypes(new String[]{"Enum", "Enum", "Enum", "Enum"});
    Frame frT = ParseDataset.parse(Key.make(), new Key[]{k}, true, gSetupT);
    //testParsed(frT,expectTrue); // not currently passing
    frT.delete();
  }
  // Smoke test: a quoted CSV file parses with the expected dimensions.
  @Test public void testSingleQuotes2() {
    Frame fr = parse_test_file("smalldata/junit/test_quote.csv");
    Assert.assertEquals(fr.numCols(),11);
    Assert.assertEquals(fr.numRows(), 7);
    fr.delete();
  }
  // Doubled double-quotes inside a quoted field must unescape to a single quote.
  @Test public void testDoubleQuotes() {
    Frame fr = null;
    try {
      String[] data = new String[]{"Tomass,test,\"Feline says \"\"meh\"\".\",line\nTomass,test2,second,line\nTomass,test3,last,line"};
      Key k = ParserTest.makeByteVec(data);
      ParseSetup gSetupF = ParseSetup.guessSetup(null, StringUtils.bytesOf(data[0]), new ParseSetup(CSV_INFO, (byte)',', false/*single quote*/, ParseSetup.NO_HEADER, 4, null, null));
      gSetupF._column_types = ParseSetup.strToColumnTypes(new String[]{"String", "String", "String", "String"});
      fr = ParseDataset.parse(Key.make(), new Key[]{k}, true, gSetupF);
      BufferedString str = new BufferedString();
      Vec[] vecs = fr.vecs();
      Assert.assertEquals(fr.numCols(),4);
      Assert.assertEquals(fr.numRows(), 3);
      Assert.assertEquals("Feline says \"meh\".", vecs[2].atStr(str, 0).toString());
      fr.delete();
    }
    finally {
      if( fr != null ) fr.delete();
    }
  }
  // Test very sparse data
  @Test public void testSparse() {
    // Build 100 zero's and 1 one.
    double[][] exp = new double[101][1];
    exp[50][0] = 1;
    StringBuilder sb = new StringBuilder();
    for( int i=0; i<50; i++ ) sb.append("0.0\n");
    sb.append("1.0\n");
    for( int i=0; i<50; i++ ) sb.append("0.0\n");
    Key k = ParserTest.makeByteVec(sb.toString());
    ParserTest.testParsed(ParseDataset.parse(Key.make(), k),exp,101);
    // Build 100 zero's and 1 non-zero.
    exp = new double[101][1];
    exp[50][0] = 2;
    sb = new StringBuilder();
    for( int i=0; i<50; i++ ) sb.append("0\n");
    sb.append("2\n");
    for( int i=0; i<50; i++ ) sb.append("0\n");
    k = ParserTest.makeByteVec(sb.toString());
    ParserTest.testParsed(ParseDataset.parse(Key.make(), k),exp,101);
    // Build 100 zero's and some non-zeros.  Last line is truncated.
    for (char sep : SEPARATORS) {
      exp = new double[101][2];
      exp[ 50][0] = 2;
      exp[ 50][1] = 3;
      exp[100][0] = 0;          // Truncated final line
      exp[100][1] = Double.NaN;
      sb = new StringBuilder();
      for( int i=0; i<50; i++ ) sb.append("0").append(sep).append("0\n");
      sb.append("2").append(sep).append("3\n");
      for( int i=0; i<49; i++ ) sb.append("0").append(sep).append("0\n");
      sb.append("0");           // Truncated final line
      k = ParserTest.makeByteVec(sb.toString());
      ParserTest.testParsed(ParseDataset.parse(Key.make(), k),exp,101);
    }
    // Build 100000 zero's and some one's
    sb = new StringBuilder();
    exp = new double[100100][1];
    for( int i=0; i<100; i++ ) {
      for( int j=0; j<1000; j++ )
        sb.append("0\n");
      sb.append("1\n");
      exp[i*1001+1000][0]=1;
    }
    k = ParserTest.makeByteVec(sb.toString());
    ParserTest.testParsed(ParseDataset.parse(Key.make(), k),exp,100100);
    // Build 100 zero's, then 100 mix of -1001 & 1001's (to force a
    // sparse-short, that finally inflates to a full dense-short).
    sb = new StringBuilder();
    for( int i=0; i<100; i++ ) sb.append("0\n");
    for( int i=0; i<100; i+=2 ) sb.append("-1001\n1001\n");
    exp = new double[200][1];
    for( int i=0; i<100; i+=2 ) { exp[i+100][0]=-1001; exp[i+101][0]= 1001; }
    k = ParserTest.makeByteVec(sb.toString());
    ParserTest.testParsed(ParseDataset.parse(Key.make(), k),exp,200);
  }
  // test correctness of sparse chunks
  // added after failing to encode properly following data as
  // 0s were not considered when computing compression strategy and then
  // lemin was 6108 and there was Short overflow when encoding zeros.
  // So, the first column was compressed into C2SChunk with 0s causing short overflow,
  @Test public void testSparse2(){
    String data =
        "0,  0,0,0,0,0\n" +
            "0,  0,0,0,0,0\n" +
            "0,  0,0,0,0,0\n" +
            "0,  0,0,0,0,0\n" +
            "0,  0,0,0,0,0\n" +
            "0,  0,0,0,0,0\n" +
            "0,  0,0,0,0,0\n" +
            "0,  0,0,0,0,0\n" +
            "0,  0,0,0,0,0\n" +
            "35351,  0,0,0,0,0\n" +
            "0,  0,0,0,0,0\n" +
            "0,  0,0,0,0,0\n" +
            "0,  0,0,0,0,0\n" +
            "0,  0,0,0,0,0\n" +
            "0,  0,0,0,0,0\n" +
            "6108,  0,0,0,0,0\n" +
            "0,  0,0,0,0,0\n" +
            "0,  0,0,0,0,0\n" +
            "0,  0,0,0,0,0\n" +
            "0,  0,0,0,0,0\n" +
            "0,  0,0,0,0,0\n" +
            "0,  0,0,0,0,0\n" +
            "0,  0,0,0,0,0\n" +
            "0,  0,0,0,0,0\n" +
            "35351,  0,0,0,0,0\n" +
            "0,  0,0,0,0,0\n" +
            "0,  0,0,0,0,0\n" +
            "0,  0,0,0,0,0\n" +
            "0,  0,0,0,0,0\n" +
            "0,  0,0,0,0,0\n" +
            "6334,  0,0,0,0,0\n" +
            "0,  0,0,0,0,0\n" +
            "0,  0,0,0,0,0\n";
    double[][] exp = new double[][] {
        ard(0,0,0,0,0,0),
        ard(0,0,0,0,0,0),
        ard(0,0,0,0,0,0),
        ard(0,0,0,0,0,0),
        ard(0,0,0,0,0,0),
        ard(0,0,0,0,0,0),
        ard(0,0,0,0,0,0),
        ard(0,0,0,0,0,0),
        ard(0,0,0,0,0,0),
        ard(35351,0,0,0,0,0),
        ard(0,0,0,0,0,0),
        ard(0,0,0,0,0,0),
        ard(0,0,0,0,0,0),
        ard(0,0,0,0,0,0),
        ard(0,0,0,0,0,0),
        ard(6108,0,0,0,0,0),
        ard(0,0,0,0,0,0),
        ard(0,0,0,0,0,0),
        ard(0,0,0,0,0,0),
        ard(0,0,0,0,0,0),
        ard(0,0,0,0,0,0),
        ard(0,0,0,0,0,0),
        ard(0,0,0,0,0,0),
        ard(0,0,0,0,0,0),
        ard(35351,0,0,0,0,0),
        ard(0,0,0,0,0,0),
        ard(0,0,0,0,0,0),
        ard(0,0,0,0,0,0),
        ard(0,0,0,0,0,0),
        ard(0,0,0,0,0,0),
        ard(6334,0,0,0,0,0),
        ard(0,0,0,0,0,0),
    };
    Key k = ParserTest.makeByteVec(data);
    ParserTest.testParsed(ParseDataset.parse(Key.make(), k),exp,33);
  }
  // Micro-benchmark for Categorical.addKey; @Ignore'd so it never runs in CI.
  @Ignore
  public void testSpeedOfCategoricalUpdate() {
    Categorical cat = new Categorical();
    int numOfUniqueCats = 363;
    String values[] = new String[numOfUniqueCats];
    for (int i = 0; i< numOfUniqueCats; i++) values[i] = UUID.randomUUID().toString();
    int numOfIterations = 1000000000;
    Random random = new Random(0xf267deadbabecafeL);
    BufferedString bs = new BufferedString();
    long startTime = System.currentTimeMillis();
    for (int i = 0; i < numOfIterations; i++) {
      int idx = random.nextInt(numOfUniqueCats);
      bs.set(StringUtils.bytesOf(values[idx]));
      cat.addKey(bs);
      if (i % 10000000 == 0) System.out.println("Iterations: " + i);
    }
    System.out.println("Time: " + PrettyPrint.msecs(System.currentTimeMillis() - startTime, false));
  }
}
package com.matthewtamlin.sliding_intro_screen_library.buttons;
import android.graphics.Typeface;
import android.graphics.drawable.Drawable;
import android.view.View;
/**
* Provides indirect access to an IntroButton, to allow limited modification and inspection. Each
* accessor provides access to a single IntroButton only.
* <p/>
* See {@link IntroButton}.
*/
public final class IntroButtonAccessor {
    /**
     * The IntroButton this accessor provides access to.
     */
    private final IntroButton button;
    /**
     * Constructs a new IntroButtonAccessor instance.
     *
     * @param button
     *         the IntroButton to provide access to, not null
     * @throws IllegalArgumentException
     *         if {@code button} is null
     */
    public IntroButtonAccessor(final IntroButton button) {
        if (button == null) {
            throw new IllegalArgumentException("button cannot be null");
        }
        this.button = button;
    }
    /**
     * Sets the Behaviour of the accessed IntroButton.
     *
     * @param behaviour
     *         the Behaviour to use
     */
    public final void setBehaviour(final IntroButton.Behaviour behaviour) {
        button.setBehaviour(behaviour);
    }
    /**
     * Returns the current Behaviour of the accessed IntroButton.
     * <p/>
     * See {@link com.matthewtamlin.sliding_intro_screen_library.buttons.IntroButton.Behaviour}.
     *
     * @return the current Behaviour, not null
     */
    public final IntroButton.Behaviour getBehaviour() {
        return button.getBehaviour();
    }
    /**
     * Sets the Appearance of the accessed IntroButton.
     *
     * @param appearance
     *         the Appearance to use
     */
    public final void setAppearance(final IntroButton.Appearance appearance) {
        button.setAppearance(appearance);
    }
    /**
     * Returns the current Appearance of the accessed IntroButton.
     * <p/>
     * See {@link com.matthewtamlin.sliding_intro_screen_library.buttons.IntroButton.Appearance}.
     *
     * @return the current Appearance, not null
     */
    public final IntroButton.Appearance getAppearance() {
        return button.getAppearance();
    }
    /**
     * Sets the text to be displayed by the accessed IntroButton. The text will only be displayed
     * when the Appearance is set to {@code Appearance.TEXT_ONLY}, {@code Appearance.ICON_TEXT_LEFT}
     * or {@code Appearance.ICON_TEXT_RIGHT}. The text is linked to a Behaviour class, and will only
     * be shown when the button is using an instance of that Behaviour class.
     *
     * @param text
     *         the text to display
     * @param behaviourClass
     *         the Behaviour class to associate the text with, null to use the current Behaviour
     */
    public final void setText(final CharSequence text, final Class<? extends IntroButton.Behaviour>
            behaviourClass) {
        button.setLabel(text, behaviourClass);
    }
    /**
     * Returns the text displayed by the accessed IntroButton for a particular Behaviour class. Note
     * that the text may not currently be visible.
     *
     * @param behaviourClass
     *         the Behaviour class to get the associated text of, null to use the current Behaviour
     * @return the text for the Behaviour class, null if there is none
     */
    public final CharSequence getText(final Class<? extends IntroButton.Behaviour> behaviourClass) {
        return button.getLabel(behaviourClass);
    }
    /**
     * Sets the icon to be displayed by the accessed IntroButton. The icon will only be displayed
     * when the Appearance is set to {@code Appearance.ICON_ONLY}, {@code Appearance.ICON_TEXT_LEFT}
     * or {@code Appearance.ICON_TEXT_RIGHT}. The icon is linked to a Behaviour class, and will only
     * be shown when the IntroButton is using an instance of that Behaviour class.
     *
     * @param icon
     *         the icon to display
     * @param behaviourClass
     *         the Behaviour class to associate the icon with, null to use the current Behaviour
     */
    public final void setIcon(final Drawable icon, final Class<? extends IntroButton.Behaviour>
            behaviourClass) {
        button.setIcon(icon, behaviourClass);
    }
    /**
     * Returns the icon displayed by this IntroButton for a particular Behaviour class. Note that
     * the icon may not currently be visible.
     *
     * @param behaviourClass
     *         the Behaviour class to get the associated icon of, null to use the current Behaviour
     * @return the icon for the Behaviour class, null if there is none
     */
    public final Drawable getIcon(final Class<? extends IntroButton.Behaviour> behaviourClass) {
        return button.getIcon(behaviourClass);
    }
    /**
     * Sets the text color of the accessed IntroButton.
     *
     * @param color
     *         the text color, as an ARGB hex code
     */
    public final void setTextColor(final int color) {
        button.setTextColor(color);
    }
    /**
     * @return the text color of the accessed button, as an ARGB hex code
     */
    public final int getTextColor() {
        return button.getCurrentTextColor();
    }
    /**
     * Sets the typeface of the accessed IntroButton.
     *
     * @param tf
     *         the typeface to use
     */
    public final void setTypeface(final Typeface tf) {
        button.setTypeface(tf);
    }
    /**
     * Sets the size of the text displayed in the accessed IntroButton.
     *
     * @param textSizeSp
     *         the size to use, measured in scaled-pixels
     */
    public final void setTextSize(final float textSizeSp) {
        button.setTextSize(textSizeSp);
    }
    /**
     * @return the size of the text currently displayed in the accessed IntroButton, measured in
     * pixels
     */
    public final float getTextSize() {
        return button.getTextSize();
    }
    /**
     * Sets the on-click listener for the accessed IntroButton. This functionality is independent of
     * the Behaviour.
     *
     * @param l
     *         the listener to receive the callbacks, null to clear any existing listener
     */
    public final void setOnClickListener(final View.OnClickListener l) {
        button.setOnClickListener(l);
    }
}
package io.spine.server.bus;
import com.google.common.testing.NullPointerTester;
import com.google.protobuf.Any;
import com.google.protobuf.Message;
import io.spine.base.Error;
import io.spine.core.Ack;
import io.spine.core.Rejection;
import io.spine.grpc.MemoizingObserver;
import io.spine.server.bus.given.BusesTestEnv.Exceptions.DeadMessageException;
import io.spine.server.bus.given.BusesTestEnv.Exceptions.FailedValidationException;
import io.spine.server.bus.given.BusesTestEnv.Exceptions.FailingFilterException;
import io.spine.server.bus.given.BusesTestEnv.Filters.FailingFilter;
import io.spine.server.bus.given.BusesTestEnv.Filters.PassingFilter;
import io.spine.server.bus.given.BusesTestEnv.TestMessageBus;
import io.spine.test.bus.BusMessage;
import org.junit.Test;
import java.util.List;
import static io.spine.grpc.StreamObservers.memoizingObserver;
import static io.spine.server.bus.given.BusesTestEnv.STATUS_OK;
import static io.spine.server.bus.given.BusesTestEnv.busMessage;
import static io.spine.server.bus.given.BusesTestEnv.errorType;
import static io.spine.server.bus.given.BusesTestEnv.testContents;
import static io.spine.test.Tests.assertHasPrivateParameterlessCtor;
import static io.spine.test.Verify.assertSize;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
/**
* @author Dmytro Dashenkov
*/
/**
 * Tests for {@code Buses} and message-bus filter ordering: registered filters
 * run first, then the validating filter, then the dead-message filter.
 */
public class BusesShould {
    @Test
    public void have_private_util_ctor() {
        assertHasPrivateParameterlessCtor(Buses.class);
    }
    @Test
    public void not_accept_nulls() {
        // Supply non-null defaults for types NullPointerTester cannot construct.
        new NullPointerTester()
                .setDefault(Message.class, Any.getDefaultInstance())
                .setDefault(Error.class, Error.newBuilder()
                                              .setCode(1)
                                              .build())
                .setDefault(Rejection.class, Rejection.newBuilder()
                                                      .setMessage(Any.getDefaultInstance())
                                                      .build())
                .testAllPublicStaticMethods(Buses.class);
    }
    @Test
    public void deliver_a_valid_message_with_registered_dispatcher() {
        final TestMessageBus bus = busBuilder().build();
        final BusMessage message = busMessage(testContents());
        final MemoizingObserver<Ack> observer = memoizingObserver();
        bus.post(message, observer);
        // exactly one acknowledgement, with OK status
        final List<Ack> responses = observer.responses();
        assertSize(1, responses);
        final Ack response = responses.get(0);
        assertEquals(STATUS_OK, response.getStatus());
    }
    @Test
    public void apply_the_validating_filter_prior_to_the_dead_message_filter() {
        // both filters would reject; the validation error must win
        final TestMessageBus deadBusFailingValidation = busBuilder().withNoDispatchers()
                                                                    .failingValidation()
                                                                    .build();
        assertBusPostErrs(deadBusFailingValidation, FailedValidationException.TYPE);
    }
    @Test
    public void apply_registered_filters_prior_to_the_validating_filter() {
        // both would reject; the registered filter's error must win
        final TestMessageBus deadBusFailingValidation = busBuilder().withNoDispatchers()
                                                                    .failingValidation()
                                                                    .addFilter(new FailingFilter())
                                                                    .build();
        assertBusPostErrs(deadBusFailingValidation, FailingFilterException.TYPE);
    }
    @Test
    public void apply_the_validating_filter() {
        final TestMessageBus busFailingValidation = busBuilder().failingValidation()
                                                                .build();
        assertBusPostErrs(busFailingValidation, FailedValidationException.TYPE);
    }
    @Test
    public void apply_a_registered_filter() {
        final TestMessageBus bus = busBuilder().addFilter(new FailingFilter())
                                               .build();
        assertBusPostErrs(bus, FailingFilterException.TYPE);
    }
    @Test
    public void apply_registered_filters() {
        // filters before the failing one must all have been consulted
        final PassingFilter passingFilter = new PassingFilter();
        final PassingFilter passingFilter2 = new PassingFilter();
        final TestMessageBus bus = busBuilder().addFilter(passingFilter)
                                               .addFilter(passingFilter2)
                                               .addFilter(new FailingFilter())
                                               .build();
        assertBusPostErrs(bus, FailingFilterException.TYPE);
        assertTrue(passingFilter.passed());
        assertTrue(passingFilter2.passed());
    }
    @Test
    public void apply_the_dead_message_filter() {
        final TestMessageBus deadBus = busBuilder().withNoDispatchers()
                                                   .build();
        assertBusPostErrs(deadBus, DeadMessageException.TYPE);
    }
    private static TestMessageBus.Builder busBuilder() {
        return TestMessageBus.newBuilder();
    }
    /**
     * Asserts that bus acknowledges the error when posting a single message,
     * and that the rejected message was not stored.
     */
    private static void assertBusPostErrs(TestMessageBus bus, String type) {
        final BusMessage message = busMessage(testContents());
        final MemoizingObserver<Ack> observer = memoizingObserver();
        bus.post(message, observer);
        final List<Ack> responses = observer.responses();
        assertSize(1, responses);
        final Ack response = responses.get(0);
        assertEquals(type, errorType(response));
        assertSize(0, bus.storedMessages());
    }
}
package com.linkedin.metadata.search.elasticsearch.indexbuilder;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.MapDifference;
import com.google.common.collect.Maps;
import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.TimeUnit;
import javax.annotation.Nonnull;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest;
import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions;
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest;
import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest;
import org.elasticsearch.action.admin.indices.settings.get.GetSettingsRequest;
import org.elasticsearch.client.GetAliasesResponse;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.core.CountRequest;
import org.elasticsearch.client.indices.CreateIndexRequest;
import org.elasticsearch.client.indices.GetIndexRequest;
import org.elasticsearch.client.indices.GetMappingsRequest;
import org.elasticsearch.client.tasks.TaskSubmissionResponse;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.reindex.ReindexRequest;
@Slf4j
@RequiredArgsConstructor
public class ESIndexBuilder {
private final RestHighLevelClient searchClient;
private final int numShards;
private final int numReplicas;
private static final int NUM_RETRIES = 3;
private static final List<String> SETTINGS_TO_COMPARE = ImmutableList.of("number_of_shards", "number_of_replicas");
public void buildIndex(String indexName, Map<String, Object> mappings, Map<String, Object> settings)
throws IOException {
// Check if index exists
boolean exists = searchClient.indices().exists(new GetIndexRequest(indexName), RequestOptions.DEFAULT);
Map<String, Object> baseSettings = new HashMap<>(settings);
baseSettings.put("number_of_shards", numShards);
baseSettings.put("number_of_replicas", numReplicas);
Map<String, Object> finalSettings = ImmutableMap.of("index", baseSettings);
// If index doesn't exist, create index
if (!exists) {
createIndex(indexName, mappings, finalSettings);
return;
}
Map<String, Object> oldMappings = searchClient.indices()
.getMapping(new GetMappingsRequest().indices(indexName), RequestOptions.DEFAULT)
.mappings()
.values()
.stream()
.findFirst()
.get()
.getSourceAsMap();
MapDifference<String, Object> mappingsDiff = Maps.difference(mappings, oldMappings);
Settings oldSettings = searchClient.indices()
.getSettings(new GetSettingsRequest().indices(indexName), RequestOptions.DEFAULT)
.getIndexToSettings()
.valuesIt()
.next();
// If there are no updates to mappings, return
if (mappingsDiff.areEqual() && equals(finalSettings, oldSettings)) {
log.info("No updates to index {}", indexName);
return;
}
if (!mappingsDiff.areEqual()) {
log.info("There's diff between new mappings (left) and old mappings (right): {}", mappingsDiff.toString());
} else {
log.info("There's an update to settings");
}
String tempIndexName = indexName + "_" + System.currentTimeMillis();
createIndex(tempIndexName, mappings, finalSettings);
try {
TaskSubmissionResponse reindexTask;
reindexTask = searchClient.submitReindexTask(new ReindexRequest().setSourceIndices(indexName).setDestIndex(tempIndexName),
RequestOptions.DEFAULT);
// wait up to 5 minutes for the task to complete
long startTime = System.currentTimeMillis();
long millisToWait60Minutes = 1000 * 60 * 60;
Boolean reindexTaskCompleted = false;
while ((System.currentTimeMillis() - startTime) < millisToWait60Minutes) {
log.info("Reindexing from {} to {} in progress...", indexName, tempIndexName);
ListTasksRequest request = new ListTasksRequest();
ListTasksResponse tasks = searchClient.tasks().list(request, RequestOptions.DEFAULT);
if (tasks.getTasks().stream().noneMatch(task -> task.getTaskId().toString().equals(reindexTask.getTask()))) {
log.info("Reindexing {} to {} task has completed, will now check if reindex was successful", indexName, tempIndexName);
reindexTaskCompleted = true;
break;
}
try {
Thread.sleep(5000);
} catch (InterruptedException e) {
log.info("Trouble sleeping while reindexing {} to {}: Exception {}. Retrying...", indexName, tempIndexName, e.toString());
}
}
if (!reindexTaskCompleted) {
throw new RuntimeException(String.format("Reindex from %s to %s failed-- task exceeded 60 minute limit", indexName, tempIndexName));
}
} catch (Exception e) {
log.info("Failed to reindex {} to {}: Exception {}", indexName, tempIndexName, e.toString());
searchClient.indices().delete(new DeleteIndexRequest().indices(tempIndexName), RequestOptions.DEFAULT);
throw e;
}
// Check whether reindex succeeded by comparing document count
// There can be some delay between the reindex finishing and count being fully up to date, so try multiple times
long originalCount = 0;
long reindexedCount = 0;
for (int i = 0; i < NUM_RETRIES; i++) {
// Check if reindex succeeded by comparing document counts
originalCount = getCount(indexName);
reindexedCount = getCount(tempIndexName);
if (originalCount == reindexedCount) {
break;
}
try {
TimeUnit.SECONDS.sleep(1);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
}
if (originalCount != reindexedCount) {
log.info("Post-reindex document count is different, source_doc_count: {} reindex_doc_count: {}", originalCount,
reindexedCount);
searchClient.indices().delete(new DeleteIndexRequest().indices(tempIndexName), RequestOptions.DEFAULT);
throw new RuntimeException(String.format("Reindex from %s to %s failed", indexName, tempIndexName));
}
log.info("Reindex from {} to {} succeeded", indexName, tempIndexName);
String indexNamePattern = indexName + "_*";
// Check if the original index is aliased or not
GetAliasesResponse aliasesResponse = searchClient.indices()
.getAlias(new GetAliasesRequest(indexName).indices(indexNamePattern), RequestOptions.DEFAULT);
// If not aliased, delete the original index
if (aliasesResponse.getAliases().isEmpty()) {
searchClient.indices().delete(new DeleteIndexRequest().indices(indexName), RequestOptions.DEFAULT);
} else {
searchClient.indices()
.delete(new DeleteIndexRequest().indices(aliasesResponse.getAliases().keySet().toArray(new String[0])),
RequestOptions.DEFAULT);
}
// Add alias for the new index
AliasActions removeAction = AliasActions.remove().alias(indexName).index(indexNamePattern);
AliasActions addAction = AliasActions.add().alias(indexName).index(tempIndexName);
searchClient.indices()
.updateAliases(new IndicesAliasesRequest().addAliasAction(removeAction).addAliasAction(addAction),
RequestOptions.DEFAULT);
log.info("Finished setting up {}", indexName);
}
private long getCount(@Nonnull String indexName) throws IOException {
return searchClient.count(new CountRequest(indexName).query(QueryBuilders.matchAllQuery()), RequestOptions.DEFAULT)
.getCount();
}
private void createIndex(String indexName, Map<String, Object> mappings, Map<String, Object> settings)
throws IOException {
log.info("Index {} does not exist. Creating", indexName);
CreateIndexRequest createIndexRequest = new CreateIndexRequest(indexName);
createIndexRequest.mapping(mappings);
createIndexRequest.settings(settings);
searchClient.indices().create(createIndexRequest, RequestOptions.DEFAULT);
log.info("Created index {}", indexName);
}
private boolean equals(Map<String, Object> newSettings, Settings oldSettings) {
if (!newSettings.containsKey("index")) {
return true;
}
Map<String, Object> indexSettings = (Map<String, Object>) newSettings.get("index");
if (!indexSettings.containsKey("analysis")) {
return true;
}
// Compare analysis section
Map<String, Object> newAnalysis = (Map<String, Object>) indexSettings.get("analysis");
Settings oldAnalysis = oldSettings.getByPrefix("index.analysis.");
if (!equalsGroup(newAnalysis, oldAnalysis)) {
return false;
}
// Compare remaining settings
return SETTINGS_TO_COMPARE.stream()
.noneMatch(settingKey -> Objects.equals(indexSettings.get(settingKey), oldSettings.get("index." + settingKey)));
}
private boolean equalsGroup(Map<String, Object> newSettings, Settings oldSettings) {
if (!newSettings.keySet().equals(oldSettings.names())) {
return false;
}
for (String key : newSettings.keySet()) {
if (newSettings.get(key) instanceof Map) {
if (!equalsGroup((Map<String, Object>) newSettings.get(key), oldSettings.getByPrefix(key + "."))) {
return false;
}
} else if (newSettings.get(key) instanceof List) {
if (!newSettings.get(key).equals(oldSettings.getAsList(key))) {
return false;
}
} else {
if (!newSettings.get(key).toString().equals(oldSettings.get(key))) {
return false;
}
}
}
return true;
}
} |
package org.opencps.api.controller.impl;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
import java.text.SimpleDateFormat;
import java.util.Base64;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.HttpMethod;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.Response;
import org.opencps.api.controller.DossierSyncManagement;
import org.opencps.api.controller.exception.ErrorMsg;
import org.opencps.api.controller.util.DossierSyncUtils;
import org.opencps.api.dossiersync.model.DossierSyncResultsModel;
import org.opencps.api.dossiersync.model.DossierSyncSendingModel;
import org.opencps.auth.api.BackendAuth;
import org.opencps.auth.api.BackendAuthImpl;
import org.opencps.auth.api.exception.NotFoundException;
import org.opencps.auth.api.exception.UnauthenticationException;
import org.opencps.auth.api.exception.UnauthorizationException;
import org.opencps.auth.api.keys.ActionKeys;
import org.opencps.dossiermgt.constants.DossierTerm;
import org.opencps.dossiermgt.model.Dossier;
import org.opencps.dossiermgt.model.DossierAction;
import org.opencps.dossiermgt.model.DossierFile;
import org.opencps.dossiermgt.model.DossierSync;
import org.opencps.dossiermgt.model.DossierTemplate;
import org.opencps.dossiermgt.model.PaymentFile;
import org.opencps.dossiermgt.model.ProcessAction;
import org.opencps.dossiermgt.model.ProcessStep;
import org.opencps.dossiermgt.scheduler.InvokeREST;
import org.opencps.dossiermgt.scheduler.RESTFulConfiguration;
import org.opencps.dossiermgt.service.DossierActionLocalServiceUtil;
import org.opencps.dossiermgt.service.DossierFileLocalServiceUtil;
import org.opencps.dossiermgt.service.DossierLocalServiceUtil;
import org.opencps.dossiermgt.service.DossierSyncLocalServiceUtil;
import org.opencps.dossiermgt.service.PaymentFileLocalServiceUtil;
import org.opencps.dossiermgt.service.ProcessActionLocalServiceUtil;
import org.opencps.dossiermgt.service.ProcessStepLocalServiceUtil;
import com.liferay.document.library.kernel.model.DLFileVersion;
import com.liferay.document.library.kernel.service.DLAppLocalServiceUtil;
import com.liferay.document.library.kernel.service.DLFileEntryLocalServiceUtil;
import com.liferay.document.library.kernel.service.DLFileVersionLocalServiceUtil;
import com.liferay.portal.kernel.dao.orm.QueryUtil;
import com.liferay.portal.kernel.exception.PortalException;
import com.liferay.portal.kernel.json.JSONObject;
import com.liferay.portal.kernel.log.Log;
import com.liferay.portal.kernel.log.LogFactoryUtil;
import com.liferay.portal.kernel.model.Company;
import com.liferay.portal.kernel.model.User;
import com.liferay.portal.kernel.repository.model.FileEntry;
import com.liferay.portal.kernel.service.ServiceContext;
import com.liferay.portal.kernel.servlet.HttpMethods;
import com.liferay.portal.kernel.util.GetterUtil;
import com.liferay.portal.kernel.util.StringPool;
import com.liferay.portal.kernel.util.Validator;
public class DossierSyncManagementImpl implements DossierSyncManagement {
private final String baseUrl = "http://localhost:8080/o/rest/v2/";
private final String username = "test@liferay.com";
private final String password = "test";
private final String serectKey = "OPENCPSV2";
@Override
public Response getDossierSyncs(HttpServletRequest request, HttpHeaders header, Company company, Locale locale,
User user, ServiceContext serviceContext, String serverNo) {
BackendAuth auth = new BackendAuthImpl();
try {
if (!auth.isAuth(serviceContext)) {
throw new UnauthenticationException();
}
if (!auth.hasResource(serviceContext, DossierTemplate.class.getName(), ActionKeys.ADD_ENTRY)) {
throw new UnauthorizationException();
}
List<DossierSync> dossierSyncs = DossierSyncLocalServiceUtil.fetchByServerNo(serverNo, QueryUtil.ALL_POS,
QueryUtil.ALL_POS);
DossierSyncResultsModel result = new DossierSyncResultsModel();
result.setTotal(dossierSyncs.size());
result.getData().addAll(DossierSyncUtils.mappingToData(dossierSyncs));
return Response.status(200).entity(result).build();
} catch (Exception e) {
ErrorMsg error = new ErrorMsg();
if (e instanceof UnauthenticationException) {
error.setMessage("Non-Authoritative Information.");
error.setCode(HttpURLConnection.HTTP_NOT_AUTHORITATIVE);
error.setDescription("Non-Authoritative Information.");
return Response.status(HttpURLConnection.HTTP_NOT_AUTHORITATIVE).entity(error).build();
} else {
if (e instanceof UnauthorizationException) {
error.setMessage("Unauthorized.");
error.setCode(HttpURLConnection.HTTP_NOT_AUTHORITATIVE);
error.setDescription("Unauthorized.");
return Response.status(HttpURLConnection.HTTP_UNAUTHORIZED).entity(error).build();
} else {
error.setMessage("Internal Server Error");
error.setCode(HttpURLConnection.HTTP_FORBIDDEN);
error.setDescription(e.getMessage());
return Response.status(HttpURLConnection.HTTP_INTERNAL_ERROR).entity(error).build();
}
}
}
}
@Override
public Response sendDossierSync(HttpServletRequest request, HttpHeaders header, Company company, Locale locale,
User user, ServiceContext serviceContext, long id) {
BackendAuth auth = new BackendAuthImpl();
long groupId = GetterUtil.getLong(header.getHeaderString("groupId"));
try {
if (!auth.isAuth(serviceContext)) {
throw new UnauthenticationException();
}
DossierSync dossierSync = DossierSyncLocalServiceUtil.fetchDossierSync(id);
DossierSyncSendingModel result = new DossierSyncSendingModel();
if (Validator.isNotNull(dossierSync)) {
// Get DOSSIER in CLIENT
Dossier dossier = DossierLocalServiceUtil.fetchDossier(dossierSync.getDossierId());
// Get the latest ACTION of DOSSIER has been done
long dossierActionId = Validator.isNotNull(dossier) ? dossierActionId = dossier.getDossierActionId()
: 0l;
if (dossierActionId != 0) {
DossierAction action = DossierActionLocalServiceUtil.fetchDossierAction(dossierActionId);
if (Validator.isNotNull(action)) {
callDossierSync(groupId, dossierSync.getMethod(), action.getSyncActionCode(),
action.getActionUser(), action.getActionNote(), 0l, dossier.getReferenceUid(),
dossierActionId, id, dossierSync.getDossierId(), dossierSync.getClassPK(),
dossierSync.getCreateDossier(), serviceContext);
} else {
throw new NotFoundException("DossierActionNotFound");
}
}
result = DossierSyncUtils.mappingToSending(dossierSync);
} else {
throw new NotFoundException("NotFoundDossierSync");
}
return Response.status(200).entity(result).build();
} catch (Exception e) {
ErrorMsg error = new ErrorMsg();
if (e instanceof UnauthenticationException) {
error.setMessage("Non-Authoritative Information.");
error.setCode(HttpURLConnection.HTTP_NOT_AUTHORITATIVE);
error.setDescription("Non-Authoritative Information.");
return Response.status(HttpURLConnection.HTTP_NOT_AUTHORITATIVE).entity(error).build();
} else {
if (e instanceof UnauthorizationException) {
error.setMessage("Unauthorized.");
error.setCode(HttpURLConnection.HTTP_NOT_AUTHORITATIVE);
error.setDescription("Unauthorized.");
return Response.status(HttpURLConnection.HTTP_UNAUTHORIZED).entity(error).build();
} else {
error.setMessage("Internal Server Error");
error.setCode(HttpURLConnection.HTTP_FORBIDDEN);
error.setDescription(e.getMessage());
return Response.status(HttpURLConnection.HTTP_INTERNAL_ERROR).entity(error).build();
}
}
}
}
private void callDossierSync(long groupId, int method, String actionCode, String actionUser, String actionNote,
long assignUserId, String refId, long clientDossierActionId, long dossierSyncId, long dossierId,
long classPK, boolean isCreateDossier, ServiceContext serviceContext) throws PortalException {
InvokeREST rest = new InvokeREST();
HashMap<String, String> properties = new HashMap<String, String>();
if (isCreateDossier) {
// Call initDossier to SERVER
String httpMethod = HttpMethods.POST;
String endPoint = "dossiers";
Map<String, Object> params = getParamsPostDossier(dossierSyncId);
JSONObject resPostDossier = rest.callPostAPI(groupId, httpMethod, "application/json",
RESTFulConfiguration.SERVER_PATH_BASE, endPoint, RESTFulConfiguration.SERVER_USER,
RESTFulConfiguration.SERVER_PASS, properties, params, serviceContext);
// reset creaetDossier flag
if (resPostDossier.getInt(RESTFulConfiguration.STATUS) == HttpURLConnection.HTTP_OK) {
DossierSyncLocalServiceUtil.shiftCreateDossierStatus(dossierSyncId);
}
}
// SyncAction
if (method == 0) {
String endPointSynAction = "dossiers/" + refId + "/actions";
String endPointSynDossierNo = "dossiers/" + refId + "/dossierno/" ;
Map<String, Object> params = new LinkedHashMap<>();
params.put("actionCode", actionCode);
params.put("actionUser", actionUser);
params.put("actionNote", actionNote);
params.put("assignUserId", assignUserId);
params.put("isSynAction", 1);
JSONObject resSynsActions = rest.callPostAPI(groupId, HttpMethods.POST, "application/json",
RESTFulConfiguration.SERVER_PATH_BASE, endPointSynAction, RESTFulConfiguration.SERVER_USER,
RESTFulConfiguration.SERVER_PASS, properties, params, serviceContext);
if (resSynsActions.getInt(RESTFulConfiguration.STATUS) == HttpURLConnection.HTTP_OK) {
// remove DossierSync
DossierSyncLocalServiceUtil.deleteDossierSync(dossierSyncId);
}
}
// SyncDossierFile
if (method == 1) {
// TODO add case update file
String endPointSyncDossierFile = "dossiers/" + refId + "/files";
DossierFile dossierFile = DossierFileLocalServiceUtil.getDossierFile(classPK);
properties.put("referenceUid", dossierFile.getReferenceUid());
properties.put("dossierTemplateNo", dossierFile.getDossierTemplateNo());
properties.put("dossierPartNo", dossierFile.getDossierPartNo());
properties.put("fileTemplateNo", dossierFile.getFileTemplateNo());
properties.put("displayName", dossierFile.getDisplayName());
properties.put("isSync", StringPool.TRUE);
properties.put("formData", dossierFile.getFormData());
FileEntry fileEntry = DLAppLocalServiceUtil.getFileEntry(dossierFile.getFileEntryId());
properties.put("fileType", fileEntry.getExtension());
/*
* File file =
* DLFileEntryLocalServiceUtil.getFile(dossierFile.getFileEntryId(),
* dlFileVersion.getVersion(), false);
*/
File file = getFile(dossierFile.getFileEntryId());
// TODO review extention file
JSONObject resSynFile = rest.callPostFileAPI(groupId, HttpMethods.POST, "application/json",
RESTFulConfiguration.SERVER_PATH_BASE, endPointSyncDossierFile, RESTFulConfiguration.SERVER_USER,
RESTFulConfiguration.SERVER_PASS, properties, file, serviceContext);
if (resSynFile.getInt(RESTFulConfiguration.STATUS) == HttpURLConnection.HTTP_OK) {
// remove DossierSync
DossierSyncLocalServiceUtil.deleteDossierSync(dossierSyncId);
// Reset isNew
dossierFile.setIsNew(false);
DossierFileLocalServiceUtil.updateDossierFile(dossierFile);
}
}
// SyncPaymentFile and paymentfile status
// Sync paymentFile
if (method == 2) {
DossierSync sync = DossierSyncLocalServiceUtil.getDossierSync(dossierSyncId);
String endPointSynAction = "dossiers/" + sync.getDossierReferenceUid() + "/payments";
PaymentFile paymentFileClient = PaymentFileLocalServiceUtil.fectPaymentFile(sync.getDossierId(), sync.getFileReferenceUid());
Map<String, Object> params = new LinkedHashMap<>();
params.put("referenceUid", paymentFileClient.getReferenceUid());
params.put("govAgencyCode", paymentFileClient.getGovAgencyCode());
params.put("govAgencyName", paymentFileClient.getGovAgencyName());
params.put("applicantName", StringPool.BLANK);
params.put("applicantIdNo", StringPool.BLANK);
params.put("paymentFee", paymentFileClient.getPaymentFee());
params.put("paymentAmount", paymentFileClient.getPaymentAmount());
params.put("paymentNote", paymentFileClient.getPaymentNote());
params.put("epaymentProfile", paymentFileClient.getEpaymentProfile());
params.put("bankInfo", paymentFileClient.getBankInfo());
// TODO update payload
params.put("invoicePayload", StringPool.BLANK);
JSONObject resSynFile = rest.callPostAPI(groupId, HttpMethods.POST, "application/json", RESTFulConfiguration.SERVER_PATH_BASE,
endPointSynAction, RESTFulConfiguration.SERVER_USER, RESTFulConfiguration.SERVER_PASS, properties,
params, serviceContext);
if (resSynFile.getInt(RESTFulConfiguration.STATUS) == HttpURLConnection.HTTP_OK) {
// remove DossierSync
DossierSyncLocalServiceUtil.deleteDossierSync(dossierSyncId);
// Reset isNew
paymentFileClient.setIsNew(false);
PaymentFileLocalServiceUtil.updatePaymentFile(paymentFileClient);
//DossierFileLocalServiceUtil.updateDossierFile(dossierFile);
}
}
// Sync paymentStatus
if (method == 3) {
PaymentFile paymentFileClient = PaymentFileLocalServiceUtil.fectPaymentFile(dossierSyncId, refId);
try {
File file = File.createTempFile(String.valueOf(System.currentTimeMillis()), StringPool.PERIOD + "tmp");
if (paymentFileClient.getInvoiceFileEntryId() != 0) {
// get invoice file
file = getFile(paymentFileClient.getInvoiceFileEntryId());
}
SimpleDateFormat format = new SimpleDateFormat("DD-MM-YYYY HH:MM:SS");
properties.put("approveDatetime", format.format(paymentFileClient.getApproveDatetime()));
properties.put("accountUserName", paymentFileClient.getAccountUserName());
properties.put("govAgencyTaxNo", paymentFileClient.getGovAgencyTaxNo());
properties.put("invoiceTemplateNo", paymentFileClient.getInvoiceTemplateNo());
properties.put("invoiceIssueNo", paymentFileClient.getInvoiceIssueNo());
properties.put("invoiceNo", paymentFileClient.getInvoiceNo());
String endPointSynAction = "dossiers/" + dossierId + "/payments/" + paymentFileClient.getReferenceUid()
+ "/approval";
JSONObject resSynFile = rest.callPostFileAPI(groupId, HttpMethod.PUT, "application/json", RESTFulConfiguration.SERVER_PATH_BASE,
endPointSynAction, RESTFulConfiguration.SERVER_USER, RESTFulConfiguration.SERVER_PASS,
properties, file, serviceContext);
if (resSynFile.getInt(RESTFulConfiguration.STATUS) == HttpURLConnection.HTTP_OK) {
// remove DossierSync
DossierSyncLocalServiceUtil.deleteDossierSync(dossierSyncId);
// Reset isNew
paymentFileClient.setIsNew(false);
PaymentFileLocalServiceUtil.updatePaymentFile(paymentFileClient);
//DossierFileLocalServiceUtil.updateDossierFile(dossierFile);
}
} catch (Exception e) {
// TODO: handle exception
}
}
// remove pending in DossierAction
int countDossierSync = DossierSyncLocalServiceUtil.countByGroupDossierId(groupId, dossierId);
if (countDossierSync == 0) {
DossierActionLocalServiceUtil.updatePending(clientDossierActionId, false);
}
}
protected Dossier getDossier(String id, long groupId) throws PortalException {
// TODO update logic here
long dossierId = GetterUtil.getLong(id);
Dossier dossier = null;
dossier = DossierLocalServiceUtil.fetchDossier(dossierId);
if (Validator.isNull(dossier)) {
dossier = DossierLocalServiceUtil.getByRef(groupId, id);
}
return dossier;
}
private File getFile(long fileEntryId){
File tempFile = null;
try {
FileEntry fileEntry = DLAppLocalServiceUtil.getFileEntry(fileEntryId);
DLFileVersion dlFileVersion = DLFileVersionLocalServiceUtil.getLatestFileVersion(fileEntry.getFileEntryId(),
true);
tempFile = File.createTempFile(String.valueOf(System.currentTimeMillis()), StringPool.PERIOD + fileEntry.getExtension());
InputStream io = DLFileEntryLocalServiceUtil.getFileAsStream(fileEntryId, dlFileVersion.getVersion());
OutputStream outStream = new FileOutputStream(tempFile);
byte[] buffer = new byte[8 * 1024];
int bytesRead;
while ((bytesRead = io.read(buffer)) != -1) {
outStream.write(buffer, 0, bytesRead);
}
io.close();
// flush OutputStream to write any buffered data to file
outStream.flush();
outStream.close();
} catch (Exception e) {
// TODO: handle exception
}
return tempFile;
}
private Map<String, Object> getParamsPostDossier(long dossierSyncId) throws PortalException {
Map<String, Object> params = new HashMap<String, Object>();
try {
long dossierId = DossierSyncLocalServiceUtil.getDossierSync(dossierSyncId).getDossierId();
Dossier dossier = DossierLocalServiceUtil.getDossier(dossierId);
params.put(DossierTerm.REFERENCE_UID, dossier.getReferenceUid());
params.put(DossierTerm.SERVICE_CODE, dossier.getServiceCode());
params.put(DossierTerm.GOV_AGENCY_CODE, dossier.getGovAgencyCode());
params.put(DossierTerm.DOSSIER_TEMPLATE_NO, dossier.getDossierTemplateNo());
params.put(DossierTerm.APPLICANT_NAME, dossier.getApplicantName());
params.put(DossierTerm.APPLICANT_ID_TYPE, dossier.getApplicantIdType());
params.put(DossierTerm.APPLICANT_ID_NO, dossier.getApplicantIdNo());
params.put(DossierTerm.APPLICANT_ID_DATE, dossier.getApplicantIdDate());
params.put(DossierTerm.ADDRESS, dossier.getAddress());
params.put(DossierTerm.CITY_CODE, dossier.getCityCode());
params.put(DossierTerm.DISTRICT_CODE, dossier.getDistrictCode());
params.put(DossierTerm.WARD_CODE, dossier.getWardCode());
params.put(DossierTerm.CONTACT_NAME, dossier.getContactName());
params.put(DossierTerm.CONTACT_TEL_NO, dossier.getContactTelNo());
params.put(DossierTerm.CONTACT_EMAIL, dossier.getContactEmail());
params.put(DossierTerm.PASSWORD, dossier.getPassword());
params.put(DossierTerm.ONLINE, dossier.getOnline());
params.put(DossierTerm.NOTIFICATION, dossier.getNotification());
params.put(DossierTerm.APPLICANT_NOTE, dossier.getApplicantNote());
params.put(DossierTerm.VIA_POSTAL, dossier.getViaPostal());
params.put(DossierTerm.POSTAL_ADDRESS, dossier.getPostalAddress());
params.put(DossierTerm.POSTAL_CITY_CODE, dossier.getPostalCityCode());
params.put(DossierTerm.POSTAL_TEL_NO, dossier.getPostalTelNo());
} catch (Exception e) {
throw new PortalException("DossierNotFound");
}
return params;
}
@Deprecated
private void doSync(long groupId, String actionCode, String actionUser, String actionNote, long assignUserId,
String refId, long clientDossierActionId, long dossierSyncId) {
try {
String path = "dossiers/" + refId + "/actions";
URL url = new URL(baseUrl + path);
HttpURLConnection conn = (HttpURLConnection) url.openConnection();
String authString = username + ":" + password;
String authStringEnc = new String(Base64.getEncoder().encodeToString(authString.getBytes()));
conn.setRequestProperty("Authorization", "Basic " + authStringEnc);
conn.setRequestMethod(HttpMethods.POST);
conn.setDoInput(true);
conn.setDoOutput(true);
conn.setRequestProperty("Accept", "application/json");
conn.setRequestProperty("Content-Type", "application/x-www-form-urlencoded");
conn.setRequestProperty("groupId", String.valueOf(groupId));
Map<String, Object> params = new LinkedHashMap<>();
params.put("actionCode", actionCode);
params.put("actionUser", actionUser);
params.put("actionNote", actionNote);
params.put("assignUserId", assignUserId);
params.put("isSynAction", 1);
StringBuilder postData = new StringBuilder();
for (Map.Entry<String, Object> param : params.entrySet()) {
if (postData.length() != 0)
postData.append('&');
postData.append(java.net.URLEncoder.encode(param.getKey(), "UTF-8"));
postData.append('=');
postData.append(java.net.URLEncoder.encode(String.valueOf(param.getValue()), "UTF-8"));
}
byte[] postDataBytes = postData.toString().getBytes("UTF-8");
conn.setRequestProperty("Content-Length", String.valueOf(postDataBytes.length));
conn.getOutputStream().write(postDataBytes);
if (conn.getResponseCode() != 200) {
throw new RuntimeException("Failed : HTTP error code : " + conn.getResponseCode());
} else {
try {
// remove flag pending
DossierActionLocalServiceUtil.updatePending(clientDossierActionId, false);
// remove DOSSIER_SYNC
DossierSyncLocalServiceUtil.deleteDossierSync(dossierSyncId);
} catch (Exception e) {
// TODO: handle exception
}
}
BufferedReader br = new BufferedReader(new InputStreamReader((conn.getInputStream())));
String output;
StringBuffer sb = new StringBuffer();
while ((output = br.readLine()) != null) {
sb.append(output);
}
System.out.println(sb.toString());
conn.disconnect();
} catch (MalformedURLException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
protected ProcessAction getProcessAction(long groupId, long dossierId, String refId, String actionCode,
long serviceProcessId) throws PortalException {
ProcessAction action = null;
try {
List<ProcessAction> actions = ProcessActionLocalServiceUtil.getByActionCode(groupId, actionCode);
Dossier dossier = getDossier(groupId, dossierId, refId);
String dossierStatus = dossier.getDossierStatus();
for (ProcessAction act : actions) {
String preStepCode = act.getPreStepCode();
ProcessStep step = ProcessStepLocalServiceUtil.fetchBySC_GID(preStepCode, groupId, serviceProcessId);
if (Validator.isNotNull(step)) {
if (step.getDossierStatus().equalsIgnoreCase(dossierStatus)) {
action = act;
break;
}
} else {
action = act;
break;
}
}
} catch (Exception e) {
throw new NotFoundException("NotProcessActionFound");
}
return action;
}
protected Dossier getDossier(long groupId, long dossierId, String refId) throws PortalException {
Dossier dossier = null;
if (dossierId != 0) {
dossier = DossierLocalServiceUtil.fetchDossier(dossierId);
} else {
dossier = DossierLocalServiceUtil.getByRef(groupId, refId);
}
return dossier;
}
private void resetDossier(long groupId, String refId) {
try {
String path = "dossiers/" + refId + "/reset";
URL url = new URL(baseUrl + path);
HttpURLConnection conn = (HttpURLConnection) url.openConnection();
String authString = username + ":" + password;
String authStringEnc = new String(Base64.getEncoder().encodeToString(authString.getBytes()));
conn.setRequestProperty("Authorization", "Basic " + authStringEnc);
conn.setRequestMethod(HttpMethods.GET);
conn.setDoInput(true);
conn.setDoOutput(true);
conn.setRequestProperty("Accept", "application/json");
conn.setRequestProperty("Content-Type", "application/x-www-form-urlencoded");
conn.setRequestProperty("groupId", String.valueOf(groupId));
if (conn.getResponseCode() != 200) {
throw new RuntimeException("Failed : HTTP error code : " + conn.getResponseCode());
}
BufferedReader br = new BufferedReader(new InputStreamReader((conn.getInputStream())));
String output;
StringBuffer sb = new StringBuffer();
while ((output = br.readLine()) != null) {
sb.append(output);
}
System.out.println(sb.toString());
conn.disconnect();
} catch (MalformedURLException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
Log _log = LogFactoryUtil.getLog(DossierSyncManagementImpl.class.getName());
} |
package misc;
import com.valkryst.VMVC.Settings;
import javafx.scene.control.Alert;
import javafx.scene.control.ButtonType;
import lombok.Data;
import lombok.NonNull;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import javax.swing.filechooser.FileSystemView;
import java.io.File;
import java.io.IOException;
import java.util.*;
@Data
public class JobBuilder {
/** The program settings. */
private Settings settings;
/** The name of the Job. */
private String name;
/** The output directory. */
private String outputDirectory;
/** The file(s) belonging to the Job.*/
private List<File> files;
/** Whether the Job is an Encode Job or a Decode Job. */
private boolean isEncodeJob = true;
/**
* Constructs a new JobBuilder.
*
* @param settings
* The program settings.
*
* @throws NullPointerException
* If the settings is null.
*/
public JobBuilder(final @NonNull Settings settings) {
this.settings = settings;
reset();
}
/**
* Build a new job.
*
* @return
* The job.
*/
public Job build() {
checkState();
return new Job(this);
}
private void checkState() {
Objects.requireNonNull(outputDirectory);
Objects.requireNonNull(files);
if (name == null || name.isEmpty()) {
final UUID uuid = UUID.randomUUID();
name = uuid.toString();
}
// If the output dir isn't specified, try setting it to the home dir.
if (outputDirectory.isEmpty()) {
setOutputToHomeDirectory();
if (outputDirectory.isEmpty()) {
throw new NullPointerException("The output directory was not set.");
}
}
// Ensure output directory has the correct trailing slash:
if (!outputDirectory.endsWith("\\") && !outputDirectory.endsWith("/")) {
outputDirectory += "/";
}
// Ensure the output directory is actually a directory:
final File outputDirectory = new File(this.outputDirectory);
if (! outputDirectory.exists()) {
if (! outputDirectory.mkdir()) {
throw new IllegalArgumentException("The output directory '" + this.outputDirectory + "' does not exist and could not be created.");
}
}
if (! outputDirectory.isDirectory()) {
throw new IllegalArgumentException("The output directory '" + this.outputDirectory + "' is not a directory.");
}
}
/** Resets the state of the builder. */
public void reset() {
name = null;
setOutputToHomeDirectory();
files = new ArrayList<>();
isEncodeJob = true;
}
/** Sets the output directory to the home directory. */
private void setOutputToHomeDirectory() {
if (isEncodeJob) {
final String defaultEncodeDir = settings.getStringSetting("Default Encoding Output Directory");
if (defaultEncodeDir.isEmpty() == false) {
final File file = new File(defaultEncodeDir);
if (file.exists() && file.isDirectory()) {
outputDirectory = defaultEncodeDir;
return;
}
}
} else {
final String defaultDecodeDir = settings.getStringSetting("Default Decoding Output Directory");
if (defaultDecodeDir.isEmpty() == false) {
final File file = new File(defaultDecodeDir);
if (file.exists() && file.isDirectory()) {
outputDirectory = defaultDecodeDir;
return;
}
}
}
try {
final File home = FileSystemView.getFileSystemView().getHomeDirectory();
outputDirectory = home.getCanonicalPath() + "/";
} catch (final IOException e) {
final Logger logger = LogManager.getLogger();
logger.error(e);
final String alertMessage = "There was an issue retrieving the home directory path.\nSee the log file for more information.";
final Alert alert = new Alert(Alert.AlertType.ERROR, alertMessage, ButtonType.OK);
alert.showAndWait();
}
}
} |
package org.genericsystem.cv;
import org.apache.commons.pool2.BasePooledObjectFactory;
import org.apache.commons.pool2.PooledObject;
import org.apache.commons.pool2.impl.DefaultPooledObject;
import org.apache.commons.pool2.impl.GenericObjectPool;
import org.apache.commons.pool2.impl.GenericObjectPoolConfig;
import org.genericsystem.cv.utils.NativeLibraryLoader;
import org.opencv.core.Mat;
import org.opencv.text.OCRTesseract;
public class Ocr {
static {
NativeLibraryLoader.load();
}
// Get the OcrTesseract instance from the tesseractInstancePool to prevent multi-threading problems
private static final GenericObjectPool<OCRTesseract> tesseractInstancePool = new GenericObjectPool<>(new OCRTesseractInstanceFactory(), Ocr.buildPoolConfig());
private static final String TESSDATA_PATH = "/usr/share/tesseract-ocr/4.00/";
private static final String TESSDATA_ALT_PATH = System.getenv("TESSDATA_PREFIX");
private static final String TESSERACT_LANGUAGE = "fra";
private static final String TESSERACT_CHAR_WHITE_LIST = "qwertyuiopasdfghjklzxcvbnmQWERTYUIOPASDFGHJKLZXCVBNM0123456789.-,<>!?;éèàçÉÈÀÇ€£$œ'";
private static final int TESSERACT_OEM = 1;
private static final int TESSERACT_PSMODE = 13;
/**
* Internal factory class used to create a tesseractInstancePool of {@link OCRTesseract} instances. Otherwise, segmentation fault can occur when the instance of tesseract is shared accross multiple threads.
*
* @author Pierrik Lassalas
*/
public static class OCRTesseractInstanceFactory extends BasePooledObjectFactory<OCRTesseract> {
@Override
public OCRTesseract create() throws Exception {
OCRTesseract instance = null;
try {
// Attempt to load tessdata from the default path (when installed from official repository)
instance = OCRTesseract.create(Ocr.TESSDATA_PATH, Ocr.TESSERACT_LANGUAGE, Ocr.TESSERACT_CHAR_WHITE_LIST, Ocr.TESSERACT_OEM, Ocr.TESSERACT_PSMODE);
} catch (Exception e) {
// If tessdata was not found, attempt to load from the alternate path
try {
instance = OCRTesseract.create(Ocr.TESSDATA_ALT_PATH, Ocr.TESSERACT_LANGUAGE, Ocr.TESSERACT_CHAR_WHITE_LIST, Ocr.TESSERACT_OEM, Ocr.TESSERACT_PSMODE);
} catch (Exception e1) {
throw new RuntimeException("Unable to load tesseract data. Please ensure that tesseract-ocr is installed and configured properly on your system.", e);
}
}
return instance;
}
@Override
public PooledObject<OCRTesseract> wrap(OCRTesseract instance) {
return new DefaultPooledObject<>(instance);
}
}
public static String doWork(Mat mat) {
return doWork(mat, 0);
}
public static String doWork(Mat mat, int minConfidence) {
OCRTesseract instance = null;
String ocrText = null;
try {
instance = tesseractInstancePool.borrowObject();
ocrText = instance.run(mat, minConfidence, 1).replace("\n", "").trim();
} catch (Exception e) {
throw new RuntimeException("An error has occured during the OCR", e);
} finally {
// If the instance was retrieved, return it to the tesseractInstancePool
if (instance != null)
tesseractInstancePool.returnObject(instance);
}
return ocrText;
}
private static GenericObjectPoolConfig buildPoolConfig() {
GenericObjectPoolConfig config = new GenericObjectPoolConfig();
config.setMaxTotal(Runtime.getRuntime().availableProcessors());
config.setBlockWhenExhausted(true);
config.setMaxWaitMillis(30_000);
return config;
}
} |
package com.bouye.gw2.sab.query;
import api.web.gw2.mapping.core.APILevel;
import api.web.gw2.mapping.core.CoinAmount;
import api.web.gw2.mapping.core.EnumValueFactory;
import api.web.gw2.mapping.v2.account.Account;
import api.web.gw2.mapping.v2.characters.Character;
import api.web.gw2.mapping.v2.account.AccountAccessType;
import api.web.gw2.mapping.v2.characters.CharacterProfession;
import api.web.gw2.mapping.v2.items.Item;
import api.web.gw2.mapping.v2.items.ItemRarity;
import api.web.gw2.mapping.v2.items.ItemType;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.time.ZonedDateTime;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import java.util.OptionalInt;
import java.util.Properties;
import java.util.function.Function;
import static org.junit.Assert.*;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Test;
import org.junit.Before;
import org.junit.BeforeClass;
import static org.hamcrest.CoreMatchers.is;
/**
 * Integration tests exercising the Guild Wars 2 web API (v2) through {@code GW2APIClient}.
 *
 * <p>Expected values are read from a {@code settings.properties} file in the working
 * directory, which must at least define {@code app.key}. The tests query the live
 * API, so network access is required when they run.
 */
public class APITest {
    public APITest() {
    }
    /** Expected values and the API key, loaded once in {@link #setUpClass()}. */
    private static final Properties SETTINGS = new Properties();
    /**
     * Loads {@code settings.properties} and verifies the API key is present.
     *
     * @throws IOException If the settings file cannot be read.
     */
    @BeforeClass
    public static void setUpClass() throws IOException {
        final File file = new File("settings.properties"); // NOI18N.
        assertTrue(file.exists());
        assertTrue(file.canRead());
        try (final InputStream input = new FileInputStream(file)) {
            SETTINGS.load(input);
        }
        assertNotNull(SETTINGS.getProperty("app.key")); // NOI18N.
    }
    @AfterClass
    public static void tearDownClass() {
    }
    @Before
    public void setUp() {
    }
    @After
    public void tearDown() {
    }
    /** Queries the authenticated {@code account} endpoint and compares it to the expected settings. */
    @Test
    public void testAccount() {
        System.out.println("testAccount");
        final String expId = SETTINGS.getProperty("account.id"); // NOI18N.
        final String expName = SETTINGS.getProperty("account.name"); // NOI18N.
        final ZonedDateTime expCreated = ZonedDateTime.parse(SETTINGS.getProperty("account.created")); // NOI18N.
        final int expWorld = Integer.parseInt(SETTINGS.getProperty("account.world")); // NOI18N.
        final AccountAccessType expAccess = EnumValueFactory.INSTANCE.mapEnumValue(AccountAccessType.class, SETTINGS.getProperty("account.access")); // NOI18N.
        final boolean expCommander = Boolean.parseBoolean(SETTINGS.getProperty("account.commander")); // NOI18N.
        final int expFractal = Integer.parseInt(SETTINGS.getProperty("account.fractal_level")); // NOI18N.
        assertNotNull(expId);
        assertNotNull(expName);
        final Optional<Account> value = GW2APIClient.create()
                .applicationKey(SETTINGS.getProperty("app.key")) // NOI18N.
                .apiLevel(APILevel.V2)
                .endPoint("account") // NOI18N.
                .queryObject(Account.class);
        assertTrue(value.isPresent());
        assertEquals(expId, value.get().getId());
        assertEquals(expName, value.get().getName());
        assertEquals(expCreated, value.get().getCreated());
        assertEquals(expWorld, value.get().getWorld());
        assertEquals(expAccess, value.get().getAccess());
        assertEquals(expCommander, value.get().isCommander());
        assertEquals(expFractal, value.get().getFractalLevel());
    }
    /** Queries the {@code characters} endpoint and checks the character name list (order-insensitive). */
    @Test
    public void testCharacters() {
        System.out.println("testCharacters");
        final List<String> expNames = Arrays.asList(SETTINGS.getProperty("characters.names").split(",")); // NOI18N.
        final List<String> value = GW2APIClient.create()
                .applicationKey(SETTINGS.getProperty("app.key")) // NOI18N.
                .apiLevel(APILevel.V2)
                .endPoint("characters") // NOI18N.
                .queryArray(String.class);
        // Order is not important.
        assertThat(new HashSet<>(value), is(new HashSet<>(expNames)));
    }
    /** Queries a single character by name and compares name, level, and profession. */
    @Test
    public void testCharacter() {
        System.out.println("testCharacter");
        final String expName = SETTINGS.getProperty("character.name"); // NOI18N.
        final int expLevel = Integer.parseInt(SETTINGS.getProperty("character.level")); // NOI18N.
        final CharacterProfession expProfession = EnumValueFactory.INSTANCE.mapEnumValue(CharacterProfession.class, SETTINGS.getProperty("character.profession")); // NOI18N.
        assertNotNull(expName);
        // The character name doubles as its id for this endpoint.
        final String id = SETTINGS.getProperty("character.name"); // NOI18N.
        final Optional<Character> value = GW2APIClient.create()
                .applicationKey(SETTINGS.getProperty("app.key")) // NOI18N.
                .apiLevel(APILevel.V2)
                .endPoint("characters") // NOI18N.
                .id(id)
                .queryObject(Character.class);
        assertTrue(value.isPresent());
        assertEquals(expName, value.get().getName());
        assertEquals(expLevel, value.get().getLevel());
        assertEquals(expProfession, value.get().getProfession());
    }
    /** Runs {@link #testItem(int)} for every id listed in the {@code items.ids} setting. */
    @Test
    public void testItems() {
        System.out.println("testItems"); // NOI18N.
        final int[] ids = Arrays.stream(SETTINGS.getProperty("items.ids").split(",")) // NOI18N.
                .mapToInt(Integer::parseInt)
                .toArray();
        Arrays.stream(ids)
                .forEach(this::testItem);
    }
    /**
     * Queries a single item by id (unauthenticated endpoint) and compares it to the
     * expected {@code item.<id>.*} settings.
     *
     * @param idToTest The item id to query.
     */
    private void testItem(final int idToTest) {
        System.out.printf("testItem(%d)%n", idToTest); // NOI18N.
        final String prefix = String.format("item.%d.", idToTest); // NOI18N.
        final int expId = Integer.parseInt(SETTINGS.getProperty(prefix + "id")); // NOI18N.
        final String expName = SETTINGS.getProperty(prefix + "name"); // NOI18N.
        final Optional<String> expDescription = getOptional(prefix + "description", value -> value); // NOI18N.
        final ItemType expType = EnumValueFactory.INSTANCE.mapEnumValue(ItemType.class, SETTINGS.getProperty(prefix + "type")); // NOI18N.
        final int expLevel = Integer.parseInt(SETTINGS.getProperty(prefix + "level")); // NOI18N.
        final ItemRarity expRarity = EnumValueFactory.INSTANCE.mapEnumValue(ItemRarity.class, SETTINGS.getProperty(prefix + "rarity")); // NOI18N.
        final CoinAmount expVendorValue = CoinAmount.ofCopper(Integer.parseInt(SETTINGS.getProperty(prefix + "vendor_value"))); // NOI18N.
        final OptionalInt expDefaultSkin = getOptionalInt(prefix + "default_skin"); // NOI18N.
        assertNotNull(expName);
        final String lang = SETTINGS.getProperty("lang"); // NOI18N.
        final Optional<Item> value = GW2APIClient.create()
                .apiLevel(APILevel.V2)
                .endPoint("items") // NOI18N.
                .language(lang)
                .id(idToTest)
                .queryObject(Item.class);
        assertTrue(value.isPresent());
        assertEquals(expId, value.get().getId());
        assertEquals(expName, value.get().getName());
        assertEquals(expDescription, value.get().getDescription());
        assertEquals(expType, value.get().getType());
        assertEquals(expLevel, value.get().getLevel());
        assertEquals(expRarity, value.get().getRarity());
        assertEquals(expVendorValue, value.get().getVendorValue());
        assertEquals(expDefaultSkin, value.get().getDefaultSkin());
    }
    /**
     * Reads an optional setting and converts it when present.
     *
     * @param <T> The target type.
     * @param property The settings key to read.
     * @param converter Converts the raw string value when the key exists.
     * @return An empty optional when the key is absent.
     */
    private <T> Optional<T> getOptional(final String property, Function<String, T> converter) {
        final String value = SETTINGS.getProperty(property);
        return (value == null) ? Optional.empty() : Optional.of(converter.apply(value));
    }
    /**
     * Reads an optional int setting.
     *
     * @param property The settings key to read.
     * @return An empty {@code OptionalInt} when the key is absent.
     */
    private OptionalInt getOptionalInt(final String property) {
        final String value = SETTINGS.getProperty(property);
        return (value == null) ? OptionalInt.empty() : OptionalInt.of(Integer.parseInt(value));
    }
}
package water.rapids;
import water.*;
import water.fvec.*;
import water.nbhm.NonBlockingHashMap;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Set;
/** plyr's ddply: GroupBy by any other name.
* Sample AST: (h2o.ddply $frame {1;5;10} $fun)
*
* First arg is the frame we'll be working over.
* Second arg is column selection to group by.
* Third arg is the function to apply to each group.
*/
public class ASTddply extends ASTOp {
  /** Zero-based indices of the columns to group by. */
  long[] _cols;
  /** Name of the function to apply to each group. */
  String _fun;
  /** Extra arguments forwarded to the function; null when there are none. */
  AST[] _fun_args;
  static final String VARS[] = new String[]{ "ary", "{cols}", "FUN"};
  public ASTddply( ) { super(VARS); }
  @Override String opStr(){ return "h2o.ddply";}
  @Override ASTOp make() {return new ASTddply();}
  /** Parses {@code (h2o.ddply $frame {cols} $fun args...)} into a configured clone of this op. */
  @Override ASTddply parse_impl(Exec E) {
    // get the frame to work
    AST ary = E.parse();
    // Get the col ids
    AST s=E.parse();
    if( s instanceof ASTLongList) _cols = ((ASTLongList)s)._l;
    else if( s instanceof ASTNum) _cols = new long[]{(long)((ASTNum)s)._d};
    else throw new IllegalArgumentException("Columns expected to be a llist or number. Got: " + s.getClass());
    // get the fun
    _fun = ((ASTId)E.parse())._id;
    // get any fun args
    ArrayList<AST> fun_args = new ArrayList<>();
    while( !E.isEnd() )
      fun_args.add(E.parse());
    if (fun_args.size() > 0) {
      _fun_args = fun_args.toArray(new AST[fun_args.size()]);
    } else {
      _fun_args = null;
    }
    E.eatEnd();
    ASTddply res = (ASTddply)clone();
    res._asts = new AST[]{ary};
    return res;
  }
  /**
   * Executes the ddply: finds the groups (Pass1A), materializes a per-group
   * sub-Frame on some node (Pass2), runs the function on each group remotely
   * (Pass3), then assembles one output row per non-empty group result.
   */
  @Override void apply(Env env) {
    Frame fr = env.popAry(); // The Frame to work on
    // sanity check cols
    for (long l : _cols) {
      if (l > fr.numCols() || l < 0) throw new IllegalArgumentException("Column "+(l+1)+" out of range for frame columns "+fr.numCols());
    }
    // Was pondering a SIMD-like execution model, running the fcn "once" - but
    // in parallel for all groups. But this isn't going to work: each fcn
    // execution will take different control paths. Also the functions side-
    // effects' must only happen once, and they will make multiple passes over
    // the Frame passed in.
    // GroupIDs' can vary from 1 group to 1-per-row. Are formed by the cross-
    // product of the selection cols. Will be hashed to find Group - NBHML
    // mapping row-contents to group. Index is a sample row. NBHML per-node,
    // plus roll-ups. Result/Value is Group structure pointing to NewChunks
    // holding row indices.
    // Pass 1: Find Groups.
    // Build a NBHSet of unique double[]'s holding selection cols.
    // These are the unique groups, found per-node, rolled-up globally
    // Record the rows belonging to each group, locally.
    // ddplyPass1 p1 = new ddplyPass1(true,_cols).doAll(fr);
    // End up building a "transient" Frame for each group anyhow.
    // So finding the groups and the size of each group is relatively cheap!
    // pass1A, finds the number of groups and the size of each group, as well as the row numbers for each group (stashed inside of a nbhm instead of newchunks...)
    Pass1A p1a = new Pass1A(_cols).doAll(fr); // pass 1 over all data
    Group[] grps = p1a._grps.keySet().toArray(new Group[p1a._grps.size()]);
    int ngrps = grps.length;
    while( grps[ngrps-1] == null ) ngrps--; // chop out any null groups hanging at the end.
    Group[] groups = new Group[ngrps];
    System.arraycopy(grps,0,groups,0,ngrps);
    grps = groups;
    // pass2 here does the nominal work of building all of the groups.
    // for lots of tiny groups, this is probably lots of data transfer
    // this chokes the H2O cloud and can even cause it to OOM!
    // this issue is addressed by ASTGroupBy
    Pass2 p2;
    H2O.submitTask(p2=new Pass2(fr,grps)).join();
    // Pass 3: Send Groups 'round the cluster
    Key[] groupFrames = p2._keys;
    Pass3 p3;
    (p3 = new Pass3(groupFrames,ASTOp.get(_fun).make(), grps,_fun_args)).go();
    // One output row slot per remote task; results with null payloads are dropped below.
    Vec layoutVec = Vec.makeZero(p3._remoteTasks.length);
    final RemoteRapids[] results = p3._remoteTasks;
    for( int k=0;k<p2._tasks.length;++k ) {
      for(Key key: p2._tasks[k]._subsetVecKeys) Keyed.remove(key); // remove all of the subset vecs
    }
    int nonnull=-1;
    for(int i=0; i<results.length; ++i) {
      results[i] = results[i]._result==null?null:results[i];
      if(results[i]!=null)nonnull=i;
    }
    // No group produced a result: push an empty single-column frame.
    if( nonnull==-1 ) { env.pushAry(new Frame(Vec.makeCon(0, 0))); return; }
    final int ncols = results[nonnull]._result.length;
    String names[] = new String[ncols];
    String[][] domains = new String[ncols][];
    // First columns: the group-by columns, keeping the source names/domains.
    int i=0;
    for(;i<_cols.length;) {
      names[i] = fr.names()[(int)_cols[i]];
      domains[i] = fr.domains()[(int)_cols[i++]];
    }
    // Remaining columns: the function results, named C1, C2, ...
    int j=1;
    for(;i<ncols;) {
      names[i++] = "C"+j++;
    }
    Frame fr2 = new MRTask() {
      @Override public void map(Chunk[] c, NewChunk[] nc) {
        int start = (int)c[0].start();
        double d;
        for(int i=0;i<c[0]._len;++i) {
          if( results[i+start]==null ) continue;
          d = results[i+start]._result[nc.length-1];
          if( Double.isNaN(d) ) continue; // skip NA group results
          for(int j=0;j<nc.length;++j)
            nc[j].addNum(results[i+start]._result[j]);
        }
      }
    }.doAll(ncols, layoutVec).outputFrame(names,domains);
    layoutVec.remove();
    env.pushAry(fr2);
  }
  // Group description: unpacked selected double columns
  public static class Group extends ASTGroupBy.G {
    public Group() { super(); }
    public Group(int len) { super(len); a=new IcedHMIntS<>(); }
    public Group( double ds[] ) { super(ds); }
    // Global row numbers belonging to this group; map values are unused "" placeholders.
    IcedHMIntS<Integer,String> a;
  }
  /** Pass 1: discover the unique groups and record, per group, which rows belong to it. */
  private static class Pass1A extends MRTask<Pass1A> {
    private final long _gbCols[];
    IcedHM<Group,String> _grps;
    Pass1A(long[] cols) { _gbCols=cols; }
    @Override public void setupLocal() { _grps = new IcedHM<>(); }
    @Override public void map(Chunk[] c) {
      Group g = new Group(_gbCols.length);
      Group gOld;
      int start = (int)c[0].start();
      for(int i=0;i<c[0]._len;++i) {
        g.fill(i,c,_gbCols);
        String old_g = _grps.putIfAbsent(g, "");
        if( old_g==null ) {
          // We won the insert: keep this Group and allocate a fresh scratch one.
          gOld=g;
          g= new Group(_gbCols.length);
        } else {
          // NOTE(review): spin until the concurrently-inserted key becomes visible.
          gOld=_grps.getk(g);
          if( gOld==null )
            while( gOld==null ) gOld=_grps.getk(g);
        }
        // CAS loop: atomically bump the group's row count under concurrent updates.
        long cnt=gOld._N;
        while( !Group.CAS_N(gOld,cnt,cnt+1))
          cnt=gOld._N;
        gOld.a.put(start+i,"");
      }
    }
    @Override public void reduce(Pass1A t) {
      if( _grps!= t._grps ) {
        // Merge the smaller map into the larger one.
        IcedHM<Group,String> l = _grps;
        IcedHM<Group,String> r = t._grps;
        if( l.size() < r.size() ) { l=r; r=_grps; }
        for( Group rg: r.keySet() ) {
          if( l.containsKey(rg) ) { // try to add it to the set on the left.. if left already has it, then combine
            Group lg = l.getk(rg);
            long L = lg._N;
            while(!Group.CAS_N(lg,L,L+rg._N))
              L = lg._N;
          }
          // NOTE(review): groups present only in the right map are never copied into
          // the merged result here — verify this is intentional / handled elsewhere.
        }
        _grps=l;
        t._grps=null;
      }
    }
  }
  /** Pass 2: fork one Pass2Task per group to build each group's sub-Frame, round-robin across nodes. */
  private static class Pass2 extends H2O.H2OCountedCompleter<Pass2> {
    private final Frame _fr;
    private final Group[] _grps;
    Pass2(Frame f, Group[] grps) { _fr=f; _grps=grps; }
    Pass2Task[] _tasks; // want to get out _key from each Pass2Task
    Key[] _keys;
    @Override protected void compute2() {
      addToPendingCount(_grps.length-1);
      // build subset vecs for each group...
      int numnodes = H2O.CLOUD.size();
      _tasks=new Pass2Task[_grps.length];
      _keys=new Key[_grps.length];
      for( int i=0;i<_grps.length;++i ) {
        (_tasks[i]=new Pass2Task(this,i%numnodes,_grps[i],_fr._key)).fork();
        _keys[i] = _tasks[i]._key;
      }
    }
  }
  /** Builds one group's sub-Frame on a specific node (chosen round-robin by the caller). */
  private static class Pass2Task extends H2O.H2OCountedCompleter<Pass2Task> {
    // round robin spread these Vecs
    private final int _nodeID;
    private final Group _g;
    private final Key _frameKey;
    // group frame key
    Key _key;
    H2ONode _n;
    Key[] _subsetVecKeys;
    Pass2Task(H2O.H2OCountedCompleter cc, int nodeID, Group g, Key frameKey) { super(cc); _nodeID=nodeID; _g=g; _frameKey=frameKey; _n=H2O.CLOUD.members()[_nodeID]; _key=Key.make(_n); }
    @Override protected void compute2() {
      H2ONode n = H2O.CLOUD.members()[_nodeID];
      Futures fs = new Futures();
      // Collect the group's row numbers (unordered, straight from the hash map keys).
      long[] rows = new long[_g.a.size()];
      int i=0;
      for(long l: _g.a.keySet() ) rows[i++]=l;
      BuildGroup b;
      fs.add(RPC.call(n, b=new BuildGroup(_key,rows,_frameKey)));
      fs.blockForPending();
      _subsetVecKeys = b._subsetVecKeys;
      tryComplete();
    }
  }
  /** Remote task: materializes a group's sub-Frame (via SubsetVecs) on the node that homes {@code _key}. */
  private static class BuildGroup extends DTask<BuildGroup> implements Freezable {
    private final Key _frameKey; // the frame key
    private final Key _key; // this is the Vec key for the rows for the group...
    private final long[] _rows; // these are the rows numbers for the group
    private Key[] _subsetVecKeys;
    BuildGroup(Key key, long[] rows, Key frameKey) {
      _key=key;
      _rows=rows;
      _frameKey=frameKey;
      // Always 1 higher priority than calling thread... because the caller will
      // block & burn a thread waiting for this MRTask to complete.
      Thread cThr = Thread.currentThread();
      _priority = (byte)((cThr instanceof H2O.FJWThr) ? ((H2O.FJWThr)cThr)._priority+1 : super.priority());
    }
    final private byte _priority;
    @Override public byte priority() { return _priority; }
    @Override protected void compute2() {
      assert _key.home() : "Key was not homed to this node!";
      Futures fs = new Futures();
      // get a layout Vec just for the vector group
      Vec layout = Vec.makeZero(_rows.length);
      Key key = layout.group().addVec(); // get a new key
      layout.remove();
      // create the vec of rows numbers
      AppendableVec v = new AppendableVec(key);
      NewChunk n = new NewChunk(v, 0);
      for(long l: _rows) n.addNum(l);
      n.close(0, fs);
      Vec rows = v.close(fs); // this puts into the DKV!
      fs.blockForPending();
      Frame f = DKV.getGet(_frameKey); // fetch the Frame we're subsetting
      Vec[] data = f.vecs(); // Full data columns
      Vec[] gvecs = new Vec[data.length]; // the group vecs, all aligned with the rows Vec
      Key[] keys = rows.group().addVecs(data.length); // generate keys from the vector group...
      _subsetVecKeys = keys; // store these for later removal...
      // loop over and subset each column, ...one at a time...
      for (int c = 0; c < data.length; c++) {
        gvecs[c] = new SubsetVec(keys[c], rows.get_espc(), data[c]._key, rows._key);
        gvecs[c].setDomain(data[c].domain());
        DKV.put(gvecs[c]._key, gvecs[c]);
      }
      // finally put the constructed group into the DKV
      Frame aa = new Frame(_key, f._names, gvecs);
      DKV.put(_key,aa); // _key is homed to this node!
      assert _key.home(): "Key should be homed to the node! Somehow remapped during this compute2.";
      assert DKV.getGet(_key) !=null;
      tryComplete();
    }
  }
  /** Pass 3: fires one RemoteRapids per group frame and waits for all results. */
  private static class Pass3 {
    private final Key[] _frameKeys;
    private final ASTOp _FUN;
    private final Group[] _grps;
    private final AST[] _funArgs;
    RemoteRapids[] _remoteTasks;
    Pass3(Key[] frameKeys, ASTOp FUN, Group[] grps, AST[] args) {
      _frameKeys=frameKeys; _FUN=FUN; _grps=grps; _funArgs=args;
      _remoteTasks=new RemoteRapids[_frameKeys.length]; // gather up the remote tasks...
    }
    // stupid single threaded pass over all groups...
    private void go() {
      Futures fs = new Futures();
      for( int i=0;i<_frameKeys.length;++i) {
        assert DKV.getGet(_frameKeys[i]) !=null : "Frame #" + i + " was NULL: " + _frameKeys[i];
        fs.add(RPC.call(_frameKeys[i].home_node(), _remoteTasks[i] = new RemoteRapids(_frameKeys[i], _FUN, _funArgs, _grps[i]._ds)));
      }
      fs.blockForPending();
    }
  }
  /** Executes the user function on one group's sub-Frame, on the node homing that frame. */
  private static class RemoteRapids extends DTask<RemoteRapids> implements Freezable {
    private final Key _frameKey; // the group to process...
    private final ASTOp _FUN; // the ast to execute on the group
    private final AST[] _funArgs; // any additional arguments to the _FUN
    private final double[] _ds; // the "group" itself
    private double[] _result; // result is 1 row per group!
    RemoteRapids(Key frameKey, ASTOp FUN, AST[] args, double[] ds) {
      _frameKey=frameKey; _FUN=FUN; _funArgs=args; _ds=ds;
      // Always 1 higher priority than calling thread... because the caller will
      // block & burn a thread waiting for this MRTask to complete.
      Thread cThr = Thread.currentThread();
      _priority = (byte)((cThr instanceof H2O.FJWThr) ? ((H2O.FJWThr)cThr)._priority+1 : super.priority());
    }
    final private byte _priority;
    @Override public byte priority() { return _priority; }
    @Override public void compute2() {
      assert _frameKey.home();
      Env e = Env.make(new HashSet<Key>());
      Frame groupFrame = DKV.getGet(_frameKey);
      assert groupFrame!=null : "Frame ID: " + _frameKey;
      // Build the argument list: the group frame first, then any extra args.
      AST[] args = new AST[_funArgs==null?1:_funArgs.length+1];
      args[0] = new ASTFrame(groupFrame);
      if( _funArgs!=null ) System.arraycopy(_funArgs,0,args,1,_funArgs.length);
      _FUN.make().exec(e,args);
      if( !e.isNul() ) {
        // grab up the results
        Frame fr = null;
        if (e.isAry() && (fr = e.popAry()).numRows() != 1)
          throw new IllegalArgumentException("Result of ddply can only return 1 row but instead returned " + fr.numRows());
        int ncols = fr == null ? 1 : fr.numCols();
        // Result row layout: the group-by values first, then the function's outputs.
        _result = new double[_ds.length + ncols]; // fill in the results
        System.arraycopy(_ds, 0, _result, 0, _ds.length);
        int j = _ds.length;
        for (int i = 0; i < ncols; ++i) {
          if (e.isStr()) _result[j++] = e.popStr().equals("TRUE") ? 1 : 0;
          else if (e.isNum()) _result[j++] = e.popDbl();
          else if (fr != null) _result[j++] = fr.vecs()[i].at(0);
        }
      }
      groupFrame.delete();
      tryComplete();
    }
  }
  // custom serializer for <Group,String> pairs
  private static class IcedHM<G extends Iced,S extends String> extends Iced {
    private NonBlockingHashMap<G,String> _m; // the nbhm to (de)ser
    IcedHM() { _m = new NonBlockingHashMap<>(); }
    String putIfAbsent(G k, S v) { return _m.putIfAbsent(k,v);}
    void put(G g, S i) { _m.put(g,i);}
    void putAll(IcedHM<G,S> m) {_m.putAll(m._m);}
    boolean containsKey(G k) { return _m.containsKey(k); }
    Set<G> keySet() { return _m.keySet(); }
    int size() { return _m.size(); }
    String get(G g) { return _m.get(g); }
    G getk(G g) { return _m.getk(g); }
    // Wire format: 4-byte entry count, then (key, string) pairs; 0 means empty/null map.
    @Override public AutoBuffer write_impl(AutoBuffer ab) {
      if( _m==null || _m.size()==0 ) return ab.put4(0);
      else {
        ab.put4(_m.size());
        for(G g:_m.keySet()) { ab.put(g); ab.putStr(_m.get(g)); }
      }
      return ab;
    }
    @Override public IcedHM read_impl(AutoBuffer ab) {
      int mLen;
      if( (mLen=ab.get4())!=0 ) {
        _m = new NonBlockingHashMap<>();
        for( int i=0;i<mLen;++i ) _m.put((G)ab.get(), ab.getStr());
      }
      return this;
    }
  }
  // custom serializer for <Integer,String> pairs
  private static class IcedHMIntS<I extends Integer,S extends String> extends Iced {
    private NonBlockingHashMap<Integer,String> _m; // the nbhm to (de)ser
    IcedHMIntS() { _m = new NonBlockingHashMap<>(); }
    String putIfAbsent(I k, S v) { return _m.putIfAbsent(k,v);}
    void put(I g, S i) { _m.put(g,i);}
    void putAll(IcedHMIntS<I,S> m) {_m.putAll(m._m);}
    Set<Integer> keySet() { return _m.keySet(); }
    int size() { return _m.size(); }
    String get(I g) { return _m.get(g); }
    Integer getk(I g) { return _m.getk(g); }
    // Wire format: 4-byte entry count, then (int key, string) pairs; 0 means empty/null map.
    @Override public AutoBuffer write_impl(AutoBuffer ab) {
      if( _m==null || _m.size()==0 ) return ab.put4(0);
      else {
        ab.put4(_m.size());
        for(Integer g:_m.keySet()) { ab.put4(g); ab.putStr(_m.get(g)); }
      }
      return ab;
    }
    @Override public IcedHMIntS read_impl(AutoBuffer ab) {
      int mLen;
      if( (mLen=ab.get4())!=0 ) {
        _m = new NonBlockingHashMap<>();
        for( int i=0;i<mLen;++i ) _m.put(ab.get4(), ab.getStr());
      }
      return this;
    }
  }
}
package com.orhanobut.hawk;
import android.util.Log;
@SuppressWarnings("unused")
final class Logger {
private static final int CHUNK_SIZE = 4000;
private static final String TAG = "Hawk";
static void d(String message) {
log(Log.DEBUG, message, null);
}
static void e(String message) {
log(Log.ERROR, message, null);
}
static void e(String message, Throwable throwable) {
log(Log.ERROR, message, throwable);
}
static void w(String message) {
log(Log.WARN, message, null);
}
static void i(String message) {
log(Log.INFO, message, null);
}
static void v(String message) {
log(Log.VERBOSE, message, null);
}
static void wtf(String message) {
log(Log.ASSERT, message, null);
}
private Logger() {
// no instance
}
private static void log(int logType, String message, Throwable throwable) {
LogLevel logLevel = Hawk.getLogLevel();
if (logLevel == LogLevel.NONE) {
return;
}
int length = message != null ? message.length() : 0;
if (length == 0 && throwable == null) {
return;
}
if (length <= CHUNK_SIZE) {
logChunk(logType, message, throwable);
return;
}
for (int i = 0; i < length; i += CHUNK_SIZE) {
int end = Math.min(length, i + CHUNK_SIZE);
logChunk(logType, message.substring(i, end), throwable);
}
}
private static void logChunk(int logType, String chunk, Throwable throwable) {
switch (logType) {
case Log.ERROR:
Log.e(TAG, chunk, throwable);
break;
case Log.INFO:
Log.i(TAG, chunk);
break;
case Log.VERBOSE:
Log.v(TAG, chunk);
break;
case Log.WARN:
Log.w(TAG, chunk);
break;
case Log.ASSERT:
Log.wtf(TAG, chunk);
break;
case Log.DEBUG:
// Fall through, log debug by default
default:
Log.d(TAG, chunk);
break;
}
}
} |
package jsettlers.graphics.map.draw;
import go.graphics.GLDrawContext;
import java.util.ConcurrentModificationException;
import jsettlers.common.Color;
import jsettlers.common.CommonConstants;
import jsettlers.common.buildings.EBuildingType;
import jsettlers.common.buildings.IBuilding;
import jsettlers.common.buildings.IBuilding.IOccupyed;
import jsettlers.common.buildings.IBuildingOccupyer;
import jsettlers.common.buildings.OccupyerPlace;
import jsettlers.common.buildings.OccupyerPlace.ESoldierType;
import jsettlers.common.images.AnimationSequence;
import jsettlers.common.images.EImageLinkType;
import jsettlers.common.images.ImageLink;
import jsettlers.common.images.OriginalImageLink;
import jsettlers.common.map.IGraphicsGrid;
import jsettlers.common.mapobject.EMapObjectType;
import jsettlers.common.mapobject.IArrowMapObject;
import jsettlers.common.mapobject.IAttackableTowerMapObject;
import jsettlers.common.mapobject.IMapObject;
import jsettlers.common.mapobject.IStackMapObject;
import jsettlers.common.material.EMaterialType;
import jsettlers.common.movable.EAction;
import jsettlers.common.movable.EMovableType;
import jsettlers.common.movable.IMovable;
import jsettlers.common.player.IPlayerable;
import jsettlers.common.position.ShortPoint2D;
import jsettlers.common.sound.ISoundable;
import jsettlers.graphics.image.Image;
import jsettlers.graphics.image.SingleImage;
import jsettlers.graphics.map.MapDrawContext;
import jsettlers.graphics.map.draw.settlerimages.SettlerImageMap;
import jsettlers.graphics.map.geometry.MapCoordinateConverter;
import jsettlers.graphics.sequence.Sequence;
import jsettlers.graphics.sound.SoundManager;
/**
* This class handles drawing of objects on the map.
*
* @author michael
*/
public class MapObjectDrawer {
private static final OriginalImageLink INSIDE_BUILDING_RIGHT =
new OriginalImageLink(EImageLinkType.SETTLER, 12, 28, 1);
private static final OriginalImageLink INSIDE_BUILDING_LEFT =
new OriginalImageLink(EImageLinkType.SETTLER, 12, 28, 0);
private static final int FILE = 1;
private static final int TREE_TYPES = 7;
private static final int[] TREE_SEQUENCES = new int[] {
1, 2, 4, 7, 8, 16, 17,
};
private static final int[] TREE_CHANGING_SEQUENCES = new int[] {
3, 3, 6, 9, 9, 18, 18,
};
/**
* First images in tree cutting sequence
*/
private static final int TREE_FALL_IMAGES = 4;
/**
* Tree falling speed. bigger => faster.
*/
private static final float TREE_FALLING_SPEED = 1/0.001f;
private static final int TREE_ROT_IMAGES = 4;
private static final int TREE_SMALL = 12;
private static final int TREE_MEDIUM = 11;
private static final int SMALL_GROWING_TREE = 22;
private static final int CORN = 23;
private static final int CORN_GROW_STEPS = 7;
private static final int CORN_DEAD_STEP = 8;
private static final int WAVES = 26;
private static final int FILE_BORDERPOST = 13;
private static final int STONE = 31;
private static final int SELECTMARK_SEQUENCE = 11;
private static final int SELECTMARK_FILE = 4;
private static final int MILL_FILE = 13;
private static final int MILL_SEQ = 15;
private static final int PIG_SEQ = 0;
private static final int ANIMALS_FILE = 6;
private static final int FISH_SEQ = 7;
private static final AnimationSequence TREE_TEST_SEQUENCE =
new AnimationSequence("tree_test", 0, 5);
private static final int MOVE_TO_MARKER_SEQUENCE = 0;
private static final int MARKER_FILE = 3;
int animationStep = 0;
private ImageProvider imageProvider;
private final SoundManager sound;
private final MapDrawContext context;
private SettlerImageMap imageMap;
private float betweenTilesY;
/**
 * Creates a new map object drawer bound to the given draw context and sound
 * manager; both are stored for use by the draw methods.
 *
 * @param context
 *            The map draw context to render into.
 * @param sound
 *            The sound manager used to play object sounds.
 */
public MapObjectDrawer(MapDrawContext context, SoundManager sound) {
    this.context = context;
    this.sound = sound;
}
/**
 * Draws a map object at a given position.
 *
 * @param map
 *            The grid the object lives on (needed to resolve arrow positions).
 * @param x
 *            The x coordinate at which to draw the object.
 * @param y
 *            The y coordinate at which to draw the object.
 * @param object
 *            The object (tree, ...) to draw.
 */
public void drawMapObject(IGraphicsGrid map, int x, int y, IMapObject object) {
forceSetup();
byte fogstatus = context.getVisibleStatus(x, y);
if (fogstatus == 0) {
return; // break
}
float color = getColor(fogstatus);
EMapObjectType type = object.getObjectType();
float progress = object.getStateProgress();
if (type == EMapObjectType.ARROW) {
drawArrow(context, (IArrowMapObject) object, color);
} else {
switch (type) {
case TREE_ADULT:
if (context.ENABLE_ORIGINAL) {
drawTree(x, y, color);
} else {
drawTreeTest(x, y, color);
}
break;
case TREE_DEAD:
// TODO: falling tree sound.
playSound(object, 4);
drawFallingTree(x, y, progress, color);
break;
case TREE_GROWING:
drawGrowingTree(x, y, progress, color);
break;
case CORN_GROWING:
drawGrowingCorn(x, y, object, color);
break;
case CORN_ADULT:
drawCorn(x, y, color);
break;
case CORN_DEAD:
drawDeadCorn(x, y, color);
break;
case WAVES:
drawWaves(x, y, color);
break;
case STONE:
drawStones(x, y, object, color);
break;
case GHOST:
drawPlayerableByProgress(x, y, 12, 27, object, color);
playSound(object, 35);
break;
case BUILDING_DECONSTRUCTION_SMOKE:
drawByProgress(x, y, 13, 38, object.getStateProgress(),
color);
playSound(object, 36);
break;
case FOUND_COAL:
drawByProgress(x, y, FILE, 94, object.getStateProgress(),
color);
break;
case FOUND_GEMSTONE:
drawByProgress(x, y, FILE, 95, object.getStateProgress(),
color);
break;
case FOUND_GOLD:
drawByProgress(x, y, FILE, 96, object.getStateProgress(),
color);
break;
case FOUND_IRON:
drawByProgress(x, y, FILE, 97, object.getStateProgress(),
color);
break;
case FOUND_BRIMSTONE:
drawByProgress(x, y, FILE, 98, object.getStateProgress(),
color);
break;
case FOUND_NOTHING:
drawByProgress(x, y, FILE, 99, object.getStateProgress(),
color);
break;
case BUILDINGSITE_SIGN:
drawByProgress(x, y, FILE, 93, object.getStateProgress(),
color);
break;
case BUILDINGSITE_POST:
drawByProgress(x, y, FILE, 92, object.getStateProgress(),
color);
break;
case WORKAREA_MARK:
drawByProgress(x, y, FILE, 91, object.getStateProgress(),
color);
break;
case FLAG_DOOR:
drawPlayerableWaving(x, y, 13, 63, object, color);
break;
case CONSTRUCTION_MARK:
drawByProgress(x, y, 4, 6, object.getStateProgress(), color);
break;
case FLAG_ROOF:
float z = context.getDrawBuffer().getZ();
context.getDrawBuffer().setZ(.89f);
drawPlayerableWaving(x, y, 13, 64, object, color);
context.getDrawBuffer().setZ(z);
break;
case BUILDING:
drawBuilding(x, y, (IBuilding) object, color);
break;
case STACK_OBJECT:
drawStack(x, y, (IStackMapObject) object, color);
break;
case SMOKE:
drawByProgress(x, y, 13, 42, progress, color);
break;
case WINE:
drawByProgress(x, y, 1, 25, progress, color);
break;
case PLANT_DECORATION: {
int step = (x * 13 + y * 233) % 8;
Sequence<? extends Image> seq =
this.imageProvider.getSettlerSequence(1, 27);
draw(seq.getImageSafe(step), x, y, color);
}
break;
case DESERT_DECORATION: {
int step = (x * 13 + y * 233) % 5 + 10;
Sequence<? extends Image> seq =
this.imageProvider.getSettlerSequence(1, 27);
draw(seq.getImageSafe(step), x, y, color);
}
break;
case PIG: {
Sequence<? extends Image> seq =
this.imageProvider.getSettlerSequence(ANIMALS_FILE,
PIG_SEQ);
if (seq.length() > 0) {
int i = getAnimationStep(x, y) / 2;
int step = i % seq.length();
draw(seq.getImageSafe(step), x, y, color);
}
}
break;
case FISH_DECORATION: {
int step = getAnimationStep(x, y);
Sequence<? extends Image> seq =
this.imageProvider.getSettlerSequence(ANIMALS_FILE,
FISH_SEQ);
int substep = step % 1024;
if (substep < 15) {
int subseq = (step / 1024) % 4;
draw(seq.getImageSafe(subseq * 15 + substep), x, y,
color);
}
}
break;
case ATTACKABLE_TOWER: {
IMovable movable =
((IAttackableTowerMapObject) object).getMovable();
if (movable != null) {
Image image = this.imageMap.getImageForSettler(movable);
drawMovableAt(movable, image, x, y);
playMovableSound(movable);
}
}
break;
default:
break;
}
}
if (object.getNextObject() != null) {
drawMapObject(map, x, y, object.getNextObject());
}
}
/**
 * Lazily fetches the image provider and settler image map singletons; they
 * may not be available yet when this drawer is constructed.
 */
private void forceSetup() {
    if (imageProvider != null) {
        return; // already set up
    }
    imageProvider = ImageProvider.getInstance();
    imageMap = SettlerImageMap.getInstance();
}
/**
 * Draws a movable (settler) at its current position and plays its pending
 * action sound, if any.
 *
 * @param movable
 *            The movable to draw.
 */
public void draw(IMovable movable) {
    forceSetup();
    drawImage(movable, this.imageMap.getImageForSettler(movable));
    playMovableSound(movable);
}
/**
 * Plays the sound belonging to the movable's current action at most once,
 * guarded by the movable's sound-played flag.
 */
private void playMovableSound(IMovable movable) {
    if (movable.isSoundPlayed()) {
        return;
    }
    switch (movable.getAction()) {
    case ACTION1:
        playSoundAction1(movable.getMovableType());
        movable.setSoundPlayed();
        break;
    case ACTION2:
        playSoundAction2(movable.getMovableType());
        movable.setSoundPlayed();
        break;
    default:
        break;
    }
}
/**
 * Plays the ACTION1 sound for the given movable type, if that type has one.
 */
private void playSoundAction1(EMovableType type) {
    int soundId = -1;
    switch (type) {
    case LUMBERJACK:
        soundId = 1;
        break;
    case STONECUTTER:
        soundId = 3;
        break;
    case DIGGER:
        soundId = 2;
        break;
    case SAWMILLER:
        soundId = 5;
        break;
    case SMITH:
        soundId = 6;
        break;
    case FARMER:
        soundId = 12;
        break;
    case SWORDSMAN_L1:
    case SWORDSMAN_L2:
    case SWORDSMAN_L3:
        soundId = 30;
        break;
    case BOWMAN_L1:
    case BOWMAN_L2:
    case BOWMAN_L3:
        soundId = 33;
        break;
    default:
        break; // types without an ACTION1 sound stay silent
    }
    if (soundId >= 0) {
        sound.playSound(soundId, 1, 1);
    }
}

/**
 * Plays the ACTION2 sound for the given movable type.
 */
private void playSoundAction2(EMovableType type) {
    // currently there is nobody who needs this.
}
/**
 * Draws the given image for a movable at the movable's own grid position.
 */
private void drawImage(IMovable movable, Image image) {
    ShortPoint2D pos = movable.getPos();
    drawMovableAt(movable, image, pos.x, pos.y);
}
/**
 * Draws a movable at a grid position, interpolating between the origin and
 * destination tile while it is walking, and adds the selection mark if the
 * movable is selected.
 *
 * @param movable
 *            The movable to draw.
 * @param image
 *            The image to draw for it.
 * @param x
 *            The logical x coordinate of the movable.
 * @param y
 *            The logical y coordinate of the movable.
 */
private void drawMovableAt(IMovable movable, Image image, int x, int y) {
    byte fogstatus = context.getVisibleStatus(x, y);
    if (fogstatus <= CommonConstants.FOG_OF_WAR_EXPLORED) {
        // movables are only drawn on currently visible positions
        return; // break
    }
    Color color = context.getPlayerColor(movable.getPlayerId());
    float shade = MapObjectDrawer.getColor(fogstatus);
    float viewX;
    float viewY;
    if (movable.getAction() == EAction.WALKING) {
        // the tile the movable is walking away from
        int originx = x - movable.getDirection().getGridDeltaX();
        int originy = y - movable.getDirection().getGridDeltaY();
        viewX =
                betweenTilesX(originx, originy, x, y,
                        movable.getMoveProgress());
        // betweenTilesX() stores the matching y coordinate in betweenTilesY
        viewY = betweenTilesY;
    } else {
        int height = context.getHeight(x, y);
        viewX = context.getConverter().getViewX(x, y, height);
        viewY = context.getConverter().getViewY(x, y, height);
    }
    image.drawAt(context.getGl(), context.getDrawBuffer(), viewX, viewY,
            color, shade);
    if (movable.isSelected()) {
        drawSelectionMark(viewX, viewY, movable.getHealth());
    }
}
/**
 * Linearly interpolates the view x coordinate between two tiles.
 * <p>
 * SIDE EFFECT: the matching interpolated view y coordinate is stored in the
 * {@code betweenTilesY} field; callers read that field immediately after
 * calling this method.
 *
 * @param startx
 *            x of the start tile.
 * @param starty
 *            y of the start tile.
 * @param destinationx
 *            x of the destination tile.
 * @param destinationy
 *            y of the destination tile.
 * @param progress
 *            interpolation position in [0..1].
 * @return the interpolated view x coordinate.
 */
private float betweenTilesX(int startx, int starty, int destinationx,
        int destinationy, float progress) {
    float theight = context.getHeight(startx, starty);
    float dheight = context.getHeight(destinationx, destinationy);
    MapCoordinateConverter converter = context.getConverter();
    float x =
            (1 - progress)
                    * converter.getViewX(startx, starty, theight)
                    + progress
                    * converter.getViewX(destinationx, destinationy,
                            dheight);
    betweenTilesY =
            (1 - progress)
                    * converter.getViewY(startx, starty, theight)
                    + progress
                    * converter.getViewY(destinationx, destinationy,
                            dheight);
    return x;
}
/**
 * Draws the selection circle and the health bar above a selected movable.
 *
 * @param viewX
 *            View x coordinate of the movable.
 * @param viewY
 *            View y coordinate of the movable.
 * @param health
 *            Health in [0..1]; selects the health bar frame.
 */
private void drawSelectionMark(float viewX, float viewY, float health) {
    float oldZ = context.getDrawBuffer().getZ();
    context.getDrawBuffer().setZ(.9f); // draw above the map content
    ImageProvider provider = ImageProvider.getInstance();
    Image circle = provider.getSettlerSequence(4, 7).getImageSafe(0);
    circle.drawAt(context.getGl(), context.getDrawBuffer(), viewX,
            viewY + 20, -1);
    Sequence<? extends Image> bars = provider.getSettlerSequence(4, 6);
    int barIndex =
            Math.min((int) ((1 - health) * bars.length()),
                    bars.length() - 1);
    bars.getImageSafe(barIndex).drawAt(context.getGl(),
            context.getDrawBuffer(), viewX, viewY + 38, -1);
    context.getDrawBuffer().setZ(oldZ);
}
/**
 * Plays the given sound once for a sound-capable map object; the object's
 * sound-played flag prevents repetition.
 */
private void playSound(IMapObject object, int soundid) {
    if (!(object instanceof ISoundable)) {
        return;
    }
    ISoundable soundable = (ISoundable) object;
    if (soundable.isSoundPlayed()) {
        return;
    }
    sound.playSound(soundid, 1, 1);
    soundable.setSoundPlayed();
}
/**
 * Draws a flying or landed arrow, interpolated between its source and
 * target tile.
 *
 * @param context
 *            The context to draw with.
 * @param object
 *            The arrow object.
 * @param color
 *            Gray color shade (fog of war).
 */
private void drawArrow(MapDrawContext context, IArrowMapObject object,
        float color) {
    int sequence = 0;
    switch (object.getDirection()) {
    case SOUTH_WEST:
        sequence = 100;
        break;
    case WEST:
        sequence = 101;
        break;
    case NORTH_WEST:
        sequence = 102;
        break;
    case NORTH_EAST:
        sequence = 103;
        break;
    case EAST:
        sequence = 104;
        break;
    case SOUTH_EAST:
        // NOTE(review): same sequence as EAST — the 100..104 progression
        // suggests this was meant to be 105; confirm against the image
        // files before changing.
        sequence = 104;
        break;
    }
    float progress = object.getStateProgress();
    int index = Math.round(progress * 2);
    // interpolate the flight path between source and target tile
    float x =
            betweenTilesX(object.getSourceX(), object.getSourceY(),
                    object.getTargetX(), object.getTargetY(), progress);
    int iColor = Color.getABGR(color, color, color, 1);
    boolean onGround = progress >= 1;
    float z = 0;
    if (onGround) {
        // landed arrows are drawn below everything else, semi-transparent
        z = context.getDrawBuffer().getZ();
        context.getDrawBuffer().setZ(-.1f);
        iColor &= 0x7fffffff;
    }
    Image image =
            this.imageProvider.getSettlerSequence(FILE, sequence)
                    .getImageSafe(index);
    // parabolic height offset while in flight
    image.drawAt(context.getGl(), context.getDrawBuffer(), x, betweenTilesY
            + 20 * progress * (1 - progress) + 20, iColor);
    if (onGround) {
        context.getDrawBuffer().setZ(z);
    }
}
/**
 * Draws a pile of stones; the pile shrinks as the object's state progress
 * grows.
 */
private void drawStones(int x, int y, IMapObject object, float color) {
    Sequence<? extends Image> sequence =
            this.imageProvider.getSettlerSequence(FILE, STONE);
    int frame = (int) (sequence.length() - object.getStateProgress() - 1);
    draw(sequence.getImageSafe(frame), x, y, color);
}
/**
 * Draws the water wave animation; the frame is phase-shifted by position so
 * neighbouring tiles do not animate in lock step.
 */
private void drawWaves(int x, int y, float color) {
    Sequence<? extends Image> seq =
            this.imageProvider.getSettlerSequence(FILE, WAVES);
    int len = seq.length();
    if (len <= 0) {
        // missing/empty asset: the old modulo would have thrown here; the
        // old "step < len" guard was dead code (step % len is always < len)
        return;
    }
    int step = (animationStep / 2 + x / 2 + y / 2) % len;
    draw(seq.getImageSafe(step), x, y, color);
}
/**
 * Draws dead (harvested) corn.
 */
private void drawDeadCorn(int x, int y, float color) {
    draw(this.imageProvider.getSettlerSequence(FILE, CORN)
            .getImageSafe(CORN_DEAD_STEP), x, y, color);
}

/**
 * Draws growing corn; the frame is selected by the grow progress.
 */
private void drawGrowingCorn(int x, int y, IMapObject object, float color) {
    int frame = (int) (object.getStateProgress() * CORN_GROW_STEPS);
    draw(this.imageProvider.getSettlerSequence(FILE, CORN)
            .getImageSafe(frame), x, y, color);
}

/**
 * Draws fully grown corn.
 */
private void drawCorn(int x, int y, float color) {
    draw(this.imageProvider.getSettlerSequence(FILE, CORN)
            .getImageSafe(CORN_GROW_STEPS), x, y, color);
}
/**
 * Draws a growing tree: below 1/3 progress a sapling, afterwards the small
 * and medium frames of the tree type derived from this position.
 */
private void drawGrowingTree(int x, int y, float progress, float color) {
    final Image image;
    if (progress < 0.33) {
        image = this.imageProvider
                .getSettlerSequence(FILE, SMALL_GROWING_TREE)
                .getImageSafe(0);
    } else {
        Sequence<? extends Image> sequence =
                this.imageProvider.getSettlerSequence(FILE,
                        TREE_CHANGING_SEQUENCES[getTreeType(x, y)]);
        image = sequence.getImageSafe(
                progress < 0.66 ? TREE_SMALL : TREE_MEDIUM);
    }
    draw(image, x, y, color);
}
/**
 * Draws a cut tree. The animation runs through phases driven by progress:
 * falling frames, three cut frames, then rotting frames once the wood has
 * been taken.
 */
private void drawFallingTree(int x, int y, float progress, float color) {
    int treeType = getTreeType(x, y);
    int imageStep = 0;
    if (progress < IMapObject.TREE_CUT_1) {
        // phase 1: the tree tips over, clamped to the last fall frame
        imageStep = (int) (progress * TREE_FALLING_SPEED);
        if (imageStep >= TREE_FALL_IMAGES) {
            imageStep = TREE_FALL_IMAGES - 1;
        }
    } else if (progress < IMapObject.TREE_CUT_2) {
        // cut image 1
        imageStep = TREE_FALL_IMAGES;
    } else if (progress < IMapObject.TREE_CUT_3) {
        // cut image 2
        imageStep = TREE_FALL_IMAGES + 1;
    } else if (progress < IMapObject.TREE_TAKEN) {
        // cut image 3
        imageStep = TREE_FALL_IMAGES + 2;
    } else {
        // final phase: rot frames, scaled over the remaining progress
        int relativeStep =
                (int) ((progress - IMapObject.TREE_TAKEN)
                        / (1 - IMapObject.TREE_TAKEN) * TREE_ROT_IMAGES);
        imageStep = relativeStep + TREE_FALL_IMAGES + 3;
    }
    Sequence<? extends Image> seq =
            this.imageProvider.getSettlerSequence(FILE,
                    TREE_CHANGING_SEQUENCES[treeType]);
    draw(seq.getImageSafe(imageStep), x, y, color);
}
/**
 * Draws an adult tree using the animated sequence of the tree type derived
 * from its position.
 */
private void drawTree(int x, int y, float color) {
    Sequence<? extends Image> sequence =
            this.imageProvider.getSettlerSequence(FILE,
                    TREE_SEQUENCES[getTreeType(x, y)]);
    int frame = getAnimationStep(x, y) % sequence.length();
    draw(sequence.getImageSafe(frame), x, y, color);
}

/**
 * Draws an adult tree using the new (test) animation sequence.
 */
private void drawTreeTest(int x, int y, float color) {
    int frame = getAnimationStep(x, y) % TREE_TEST_SEQUENCE.getLength();
    draw(imageProvider.getImage(TREE_TEST_SEQUENCE.getImage(frame)), x, y,
            color);
}
// Dead code, kept for reference: a helper that derived a stable 0/1 value
// from a position — get01(x, y) = (x * 677 + y) % 2. Currently unused.
/**
 * Draws a player border post at a given position, tinted with the owning
 * player's color.
 *
 * @param x
 *            The x coordinate.
 * @param y
 *            The y coordinate.
 * @param player
 *            The id of the owning player.
 */
public void drawPlayerBorderObject(int x, int y, byte player) {
    forceSetup();
    byte fogstatus = context.getVisibleStatus(x, y);
    if (fogstatus <= CommonConstants.FOG_OF_WAR_EXPLORED) {
        return; // not visible right now
    }
    float shade = getColor(fogstatus);
    Color playerColor = context.getPlayerColor(player);
    Image post =
            imageProvider.getSettlerSequence(FILE_BORDERPOST, 65)
                    .getImageSafe(0);
    draw(post, x, y, playerColor, shade);
}
/**
 * Derives a pseudo-random but position-stable tree type from a grid
 * position.
 */
private static int getTreeType(int x, int y) {
    int hash = x + x / 5 + y / 3 + y + y / 7;
    return hash % TREE_TYPES;
}

/**
 * Derives a position-dependent animation step from the global step, so
 * neighbouring objects do not animate in sync.
 */
private int getAnimationStep(int x, int y) {
    int step = this.animationStep + x * 167 + y * 1223;
    return step & 0xfffffff;
}
/**
 * Refreshes the animation step for trees and other animated objects from
 * the wall clock (one step per 100 ms).
 */
public void increaseAnimationStep() {
    // BUGFIX: the old code was "(int) System.currentTimeMillis() / 100" —
    // the cast binds tighter than the division, so the clock was truncated
    // to 32 bits *before* dividing, making the step jump at every int wrap.
    // Divide first, then truncate; the mask keeps the value non-negative.
    this.animationStep =
            (int) (System.currentTimeMillis() / 100) & 0x7fffffff;
}
/**
 * Draws a stack of materials.
 *
 * @param x
 *            The x coordinate of the stack.
 * @param y
 *            The y coordinate of the stack.
 * @param object
 *            The stack to draw.
 * @param color
 *            Gray color shade (fog of war).
 */
public void drawStack(int x, int y, IStackMapObject object, float color) {
    forceSetup();
    byte size = object.getSize();
    if (size > 0) {
        drawStackAtScreen(x, y, object.getMaterialType(), size, color);
    }
}

/**
 * Draws the stack image directly to the screen.
 *
 * @param material
 *            The material the stack consists of.
 * @param count
 *            The number of elements on the stack; selects the image frame.
 */
private void drawStackAtScreen(int x, int y, EMaterialType material,
        int count, float color) {
    Sequence<? extends Image> sequence =
            this.imageProvider.getSettlerSequence(FILE,
                    material.getStackIndex());
    draw(sequence.getImageSafe(count - 1), x, y, color);
}
/**
 * Gets the gray shade for a given fog of war status.
 *
 * @param fogstatus
 *            The fog status, 0 (hidden) .. FOG_OF_WAR_VISIBLE.
 * @return The shade as a fraction of full visibility.
 */
public static float getColor(int fogstatus) {
    return fogstatus / (float) CommonConstants.FOG_OF_WAR_VISIBLE;
}
/**
 * Draws a given building to the context. Construction progress selects the
 * drawing mode: first the construction-site images are revealed, then the
 * final images, and a finished building is drawn fully (with a rotating
 * mill animation and tower occupiers as special cases).
 *
 * @param x
 *            The x coordinate of the building.
 * @param y
 *            The y coordinate of the building.
 * @param building
 *            The building to draw.
 * @param color
 *            Gray color shade
 */
private void drawBuilding(int x, int y, IBuilding building, float color) {
    EBuildingType type = building.getBuildingType();
    float state = building.getStateProgress();
    float maskState;
    if (state < 0.5f) {
        // first construction half: reveal the construction-site images
        maskState = state * 2;
        for (ImageLink link : type.getBuildImages()) {
            Image image = imageProvider.getImage(link);
            drawWithConstructionMask(x, y, maskState, image, color);
        }
    } else if (state < 0.99) {
        // second half: full construction site, final images being revealed
        maskState = state * 2 - 1;
        for (ImageLink link : type.getBuildImages()) {
            Image image = imageProvider.getImage(link);
            draw(image, x, y, color);
        }
        for (ImageLink link : type.getImages()) {
            Image image = imageProvider.getImage(link);
            drawWithConstructionMask(x, y, maskState, image, color);
        }
    } else {
        // finished building
        if (type == EBuildingType.MILL
                && ((IBuilding.IMill) building).isRotating()) {
            Sequence<? extends Image> seq =
                    this.imageProvider.getSettlerSequence(MILL_FILE,
                            MILL_SEQ);
            if (seq.length() > 0) {
                int i = getAnimationStep(x, y);
                int step = i % seq.length();
                draw(seq.getImageSafe(step), x, y, color);
            }
            playSound(building, 42);
        } else {
            ImageLink[] images = type.getImages();
            if (images.length > 0) {
                Image image = imageProvider.getImage(images[0]);
                draw(image, x, y, color);
            }
            // occupiers are drawn between the base image and the overlays
            if (building instanceof IBuilding.IOccupyed
                    && context.getVisibleStatus(x, y) > CommonConstants.FOG_OF_WAR_EXPLORED) {
                drawOccupiers(x, y, (IBuilding.IOccupyed) building, color);
            }
            for (int i = 1; i < images.length; i++) {
                Image image = imageProvider.getImage(images[i]);
                draw(image, x, y, color);
            }
        }
    }
    if (building.isSelected()) {
        drawBuildingSelectMarker(x, y);
    }
}
/**
 * Draws the occupiers (soldiers) of an occupied building at their places
 * inside the tower.
 *
 * @param x
 *            The x coordinate of the building
 * @param y
 *            The y coordinate of the building
 * @param building
 *            The occupyed building
 * @param basecolor
 *            The base color (gray shade).
 */
private void drawOccupiers(int x, int y, IOccupyed building, float basecolor) {
    // this can cause a ConcurrentModificationException when
    // a soldier enters the tower!
    try {
        int height = context.getHeight(x, y);
        float towerX = context.getConverter().getViewX(x, y, height);
        float towerY = context.getConverter().getViewY(x, y, height);
        GLDrawContext gl = context.getGl();
        for (IBuildingOccupyer occupyer : building.getOccupyers()) {
            OccupyerPlace place = occupyer.getPlace();
            IMovable movable = occupyer.getMovable();
            Color color = context.getPlayerColor(movable.getPlayerId());
            Image image;
            switch (place.getType()) {
            case INFANTRY:
                // infantry is shown with a fixed "in the building" image
                // instead of its regular settler image
                OriginalImageLink imageLink =
                        place.looksRight() ? INSIDE_BUILDING_RIGHT
                                : INSIDE_BUILDING_LEFT;
                image = imageProvider.getImage(imageLink);
                break;
            case BOWMAN:
            default:
                image = this.imageMap.getImageForSettler(movable);
            }
            // place offsets are relative to the tower's view position
            float viewX = towerX + place.getOffsetX();
            float viewY = towerY + place.getOffsetY();
            image.drawAt(gl, context.getDrawBuffer(), viewX, viewY, color,
                    basecolor);
            if (place.getType() == ESoldierType.BOWMAN) {
                playMovableSound(movable);
                if (movable.isSelected()) {
                    drawSelectionMark(viewX, viewY, movable.getHealth());
                }
            }
        }
    } catch (ConcurrentModificationException e) {
        // happens sometime, just ignore it.
    }
}
/**
 * Draws the selection marker above a selected building.
 */
private void drawBuildingSelectMarker(int x, int y) {
    float oldZ = context.getDrawBuffer().getZ();
    context.getDrawBuffer().setZ(.9f); // draw above the map content
    Image marker =
            imageProvider.getSettlerSequence(SELECTMARK_FILE,
                    SELECTMARK_SEQUENCE).getImageSafe(0);
    draw(marker, x, y, -1);
    context.getDrawBuffer().setZ(oldZ);
}
/**
 * Draws only the lower part of an image, used to let a building "grow" out
 * of the ground while it is being constructed.
 *
 * @param maskState
 *            How much of the image is shown, 0 (nothing) .. 1 (everything).
 * @param unsafeimage
 *            The image; only SingleImage instances can be masked.
 */
private void drawWithConstructionMask(int x, int y, float maskState,
        Image unsafeimage, float color) {
    if (!(unsafeimage instanceof SingleImage)) {
        return; // should not happen
    }
    int height = context.getHeight(x, y);
    float viewX = context.getConverter().getViewX(x, y, height);
    float viewY = context.getConverter().getViewY(x, y, height);
    int iColor = Color.getABGR(color, color, color, 1);
    SingleImage image = (SingleImage) unsafeimage;
    // number of tiles in x direction, can be adjusted for performance
    int tiles = 6;
    // reveal line inside the image (presumably texture v coordinates with
    // 0 at the top — TODO confirm against drawTriangle)
    float toplineBottom = 1 - maskState;
    float toplineTop = Math.max(0, toplineBottom - .1f);
    // the part below the reveal line is drawn as two full triangles ...
    image.drawTriangle(context.getGl(), context.getDrawBuffer(), viewX,
            viewY, 0, 1, 1, 1, 0, toplineBottom, iColor);
    image.drawTriangle(context.getGl(), context.getDrawBuffer(), viewX,
            viewY, 1, 1, 1, toplineBottom, 0, toplineBottom, iColor);
    // ... plus a jagged strip of small triangles along the reveal line
    for (int i = 0; i < tiles; i++) {
        image.drawTriangle(context.getGl(), context.getDrawBuffer(), viewX,
                viewY, 1.0f / tiles * i, toplineBottom, 1.0f / tiles
                        * (i + 1), toplineBottom, 1.0f / tiles * (i + .5f),
                toplineTop, iColor);
    }
}
/**
 * Draws an object whose animation frame is selected by its state progress
 * and which is tinted with the owning player's color, if it has an owner.
 */
private void drawPlayerableByProgress(int x, int y, int file,
        int sequenceIndex, IMapObject object, float basecolor) {
    Sequence<? extends Image> sequence =
            this.imageProvider.getSettlerSequence(file, sequenceIndex);
    int index =
            Math.min((int) (object.getStateProgress() * sequence.length()),
                    sequence.length() - 1);
    Color color = getColor(object);
    // use getImageSafe() like every other draw helper in this class, so a
    // missing frame falls back gracefully instead of failing
    draw(sequence.getImageSafe(index), x, y, color, basecolor);
}
/**
 * Gets the player color of a map object, or null if the object has no
 * owner.
 */
private Color getColor(IMapObject object) {
    if (object instanceof IPlayerable) {
        return context.getPlayerColor(((IPlayerable) object).getPlayerId());
    }
    return null;
}
/**
 * Draws a continuously waving (flag-like) object, tinted with the owning
 * player's color if there is one.
 */
private void drawPlayerableWaving(int x, int y, int file,
        int sequenceIndex, IMapObject object, float basecolor) {
    Sequence<? extends Image> sequence =
            this.imageProvider.getSettlerSequence(file, sequenceIndex);
    Color color = getColor(object);
    draw(sequence.getImageSafe(animationStep % sequence.length()), x, y,
            color, basecolor);
}

/**
 * Draws an object whose animation frame is selected by a progress value in
 * [0..1], clamped to the last frame.
 */
private void drawByProgress(int x, int y, int file, int sequenceIndex,
        float progress, float color) {
    Sequence<? extends Image> sequence =
            this.imageProvider.getSettlerSequence(file, sequenceIndex);
    int lastIndex = sequence.length() - 1;
    int index = Math.min((int) (progress * sequence.length()), lastIndex);
    draw(sequence.getImageSafe(index), x, y, color);
}
/**
 * Draws an image at a grid position, tinted with a player color and a gray
 * shade.
 */
private void draw(Image image, int x, int y, Color color, float basecolor) {
    int height = context.getHeight(x, y);
    MapCoordinateConverter converter = context.getConverter();
    image.drawAt(context.getGl(), context.getDrawBuffer(),
            converter.getViewX(x, y, height),
            converter.getViewY(x, y, height), color, basecolor);
}

/**
 * Draws an image at a grid position using a gray shade only.
 */
private void draw(Image image, int x, int y, float color) {
    draw(image, x, y, Color.getABGR(color, color, color, 1));
}

/**
 * Draws an image at a grid position with a packed ABGR color.
 */
private void draw(Image image, int x, int y, int color) {
    int height = context.getHeight(x, y);
    MapCoordinateConverter converter = context.getConverter();
    image.drawAt(context.getGl(), context.getDrawBuffer(),
            converter.getViewX(x, y, height),
            converter.getViewY(x, y, height), color);
}
/**
 * Draws the move-to marker animation at the given position.
 *
 * @param moveToMarker
 *            The position the marker was placed at.
 * @param progress
 *            Animation progress in [0..1].
 */
public void drawMoveToMarker(ShortPoint2D moveToMarker, float progress) {
    forceSetup();
    drawByProgress(moveToMarker.x, moveToMarker.y, MARKER_FILE,
            MOVE_TO_MARKER_SEQUENCE, progress, 1);
}
} |
package com.desarrollo.multicentro_mascotas.controller;
import com.desarrollo.multicentro_mascotas.dominio.Menu;
import com.desarrollo.multicentro_mascotas.dominio.Usuarios;
import com.desarrollo.multicentro_mascotas.ebj.MenuFacadeLocal;
import java.io.Serializable;
import java.util.List;
import java.util.Objects;
import javax.annotation.PostConstruct;
import javax.ejb.EJB;
import javax.enterprise.context.SessionScoped;
import javax.faces.context.FacesContext;
import javax.inject.Named;
import org.primefaces.model.menu.DefaultMenuItem;
import org.primefaces.model.menu.DefaultMenuModel;
import org.primefaces.model.menu.DefaultSubMenu;
import org.primefaces.model.menu.MenuModel;
/**
*
* @author mpluas
*/
@Named
@SessionScoped
public class menuController implements Serializable{
@EJB
private MenuFacadeLocal MenuEJB;
private List<Menu> lista;
private MenuModel model;
@PostConstruct
public void init(){
this.listarMenu();
model = new DefaultMenuModel();
this.establecerPermisos();
}
public void listarMenu(){
try {
lista = MenuEJB.findAll();
} catch (Exception e) {
//mensaje jsf
}
}
public MenuModel getModel() {
return model;
}
public void setModel(MenuModel model) {
this.model = model;
}
public void establecerPermisos(){
Usuarios usu = (Usuarios) FacesContext.getCurrentInstance().getExternalContext().getSessionMap().get("Usuarios");
DefaultMenuItem menItm = new DefaultMenuItem("Principal");
menItm.setIcon("icon-home-outline");
menItm.setTitle("Pagina Principal");
menItm.setOutcome("/principal/principal");
menItm.setPartialSubmit(true);
menItm.setProcess("@this");
menItm.setContainerStyleClass("layout-menubar-active");
model.addElement(menItm);
for (Menu m : lista) {
System.out.println("rol " + usu.getIdRol().getIdRol());
System.out.println("rol " + m.getIdRol().getIdRol());
if (m.getTipo().equals("S") && m.getIdRol().getIdRol().equals(usu.getIdRol().getIdRol())) {
DefaultSubMenu firsSubMenu = new DefaultSubMenu(m.getOpcion());
firsSubMenu.setIcon(m.getRutaImagen());
for (Menu i : lista) {
Menu submenu = i.getMenuPadre();
if (submenu != null) {
if (submenu.getIdMenu() == m.getIdMenu()) {
DefaultMenuItem item = new DefaultMenuItem(i.getOpcion());
item.setIcon(i.getRutaImagen());
firsSubMenu.addElement(item);
}
}
}
model.addElement(firsSubMenu);
}else{
if (m.getMenuPadre() == null && m.getIdRol().getIdRol().equals(usu.getIdRol().getIdRol())) {
DefaultMenuItem item = new DefaultMenuItem(m.getOpcion());
item.setIcon(m.getRutaImagen());
model.addElement(item);
}
}
}
}
} |
package com.reimaginebanking.api.nessieandroidsdk;
/**
 * Custom results listener which defines callback methods for success and error on API response.
 */
public interface NessieResultsListener {
    /**
     * Called when the API request completes successfully.
     *
     * @param result the response payload; the concrete type depends on the
     *               endpoint that was invoked
     */
    void onSuccess(Object result);
    /**
     * Called when the API request fails.
     *
     * @param error the error describing what went wrong
     */
    void onFailure(NessieError error);
}
package org.eclipse.mylyn.internal.resources.ui;
import java.util.HashSet;
import java.util.Set;
import org.eclipse.core.resources.IFile;
import org.eclipse.core.resources.IFolder;
import org.eclipse.core.resources.IResource;
import org.eclipse.core.resources.IResourceChangeEvent;
import org.eclipse.core.resources.IResourceChangeListener;
import org.eclipse.core.resources.IResourceDelta;
import org.eclipse.core.resources.IResourceDeltaVisitor;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IPath;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.Status;
import org.eclipse.mylyn.commons.core.StatusHandler;
import org.eclipse.mylyn.context.core.ContextCore;
import org.eclipse.mylyn.monitor.core.InteractionEvent;
import org.eclipse.mylyn.resources.ui.ResourcesUi;
/**
* @author Mik Kersten
*/
public class ResourceChangeMonitor implements IResourceChangeListener {
private boolean enabled = true;
public ResourceChangeMonitor() {
}
public void resourceChanged(IResourceChangeEvent event) {
if (!enabled || !ContextCore.getContextManager().isContextActive()) {
return;
}
if (event.getType() != IResourceChangeEvent.POST_CHANGE) {
return;
}
final Set<IResource> addedResources = new HashSet<IResource>();
final Set<IResource> changedResources = new HashSet<IResource>();
final Set<String> excludedPatterns = ResourcesUiPreferenceInitializer.getExcludedResourcePatterns();
excludedPatterns.addAll(ResourcesUiPreferenceInitializer.getForcedExcludedResourcePatterns());
IResourceDelta rootDelta = event.getDelta();
IResourceDeltaVisitor visitor = new IResourceDeltaVisitor() {
public boolean visit(IResourceDelta delta) {
IResourceDelta[] added = delta.getAffectedChildren(IResourceDelta.ADDED);
for (IResourceDelta element : added) {
IResource resource = element.getResource();
if ((resource instanceof IFile || resource instanceof IFolder)
&& !isExcluded(resource.getProjectRelativePath(), resource, excludedPatterns)) {
addedResources.add(resource);
}
}
// int changeMask = IResourceDelta.CONTENT | IResourceDelta.REMOVED | IResourceDelta.MOVED_TO | IResourceDelta.MOVED_FROM;
IResourceDelta[] changed = delta.getAffectedChildren(IResourceDelta.CONTENT | IResourceDelta.REMOVED);
for (IResourceDelta element : changed) {
IResource resource = element.getResource();
if (resource instanceof IFile) {
changedResources.add(resource);
}
}
return true;
}
};
try {
rootDelta.accept(visitor);
ResourcesUi.addResourceToContext(changedResources, InteractionEvent.Kind.PREDICTION);
ResourcesUi.addResourceToContext(addedResources, InteractionEvent.Kind.SELECTION);
} catch (CoreException e) {
StatusHandler.log(new Status(IStatus.ERROR, ResourcesUiBridgePlugin.ID_PLUGIN,
"Could not accept marker visitor", e));
}
}
/**
* Public for testing.
*
* @param resource
* can be null
*/
public boolean isExcluded(IPath path, IResource resource, Set<String> excludedPatterns) {
boolean excluded = false;
// NOTE: n^2 time complexity, but should not be a bottleneck
for (String pattern : excludedPatterns) {
if (resource != null && pattern.startsWith("file:/")) {
excluded |= isUriExcluded(resource.getLocationURI().toString(), pattern);
} else {
for (String segment : path.segments()) {
excluded |= segment.matches(pattern.replaceAll("\\.", "\\\\.").replaceAll("\\*", ".*"));
}
}
if (excluded) {
break;
}
}
return excluded;
}
/**
* Public for testing.
*/
public boolean isUriExcluded(String uri, String pattern) {
if (uri != null && uri.startsWith(pattern)) {
return true;
} else {
return false;
}
}
public boolean isEnabled() {
return enabled;
}
public void setEnabled(boolean enabled) {
this.enabled = enabled;
}
} |
package org.eclipse.titan.designer.AST.TTCN3.definitions;
import java.text.MessageFormat;
import java.util.List;
import org.eclipse.core.runtime.Platform;
import org.eclipse.titan.designer.GeneralConstants;
import org.eclipse.titan.designer.AST.ASTVisitor;
import org.eclipse.titan.designer.AST.INamedNode;
import org.eclipse.titan.designer.AST.IReferenceChain;
import org.eclipse.titan.designer.AST.ISubReference;
import org.eclipse.titan.designer.AST.IType;
import org.eclipse.titan.designer.AST.Identifier;
import org.eclipse.titan.designer.AST.Location;
import org.eclipse.titan.designer.AST.NamingConventionHelper;
import org.eclipse.titan.designer.AST.ReferenceFinder;
import org.eclipse.titan.designer.AST.Scope;
import org.eclipse.titan.designer.AST.Type;
import org.eclipse.titan.designer.AST.ISubReference.Subreference_type;
import org.eclipse.titan.designer.AST.ReferenceFinder.Hit;
import org.eclipse.titan.designer.AST.TTCN3.TemplateRestriction;
import org.eclipse.titan.designer.AST.TTCN3.templates.ITTCN3Template;
import org.eclipse.titan.designer.AST.TTCN3.templates.TTCN3Template;
import org.eclipse.titan.designer.AST.TTCN3.templates.ITTCN3Template.Template_type;
import org.eclipse.titan.designer.editors.ProposalCollector;
import org.eclipse.titan.designer.editors.actions.DeclarationCollector;
import org.eclipse.titan.designer.parsers.CompilationTimeStamp;
import org.eclipse.titan.designer.parsers.ttcn3parser.IIdentifierReparser;
import org.eclipse.titan.designer.parsers.ttcn3parser.IdentifierReparser;
import org.eclipse.titan.designer.parsers.ttcn3parser.ReParseException;
import org.eclipse.titan.designer.parsers.ttcn3parser.TTCN3ReparseUpdater;
import org.eclipse.titan.designer.parsers.ttcn3parser.Ttcn3Lexer;
import org.eclipse.titan.designer.preferences.PreferenceConstants;
import org.eclipse.titan.designer.productUtilities.ProductConstants;
/**
* The Def_Var class represents TTCN3 template variables.
*
* @author Kristof Szabados
* @author Arpad Lovassy
*/
public final class Def_Var_Template extends Definition {
private static final String FULLNAMEPART1 = ".<type>";
private static final String FULLNAMEPART2 = ".<initial_value>";
public static final String PORTNOTALLOWED = "Template variable can not be defined for port type `{0}''";
private static final String PARAMETRIZED_LOCAL_TEMPLATE_VAR = "Code generation for parameterized local template variable `{0}'' is not yet supported";
private static final String KIND = " template variable definition";
// The declared type of the template variable.
private final Type type;
/**
 * Formal parameters.
 * NOTE: It is not yet supported, so semantic error must be marked if not null
 */
private FormalParameterList mFormalParList;
// The initial value (template body) of the definition; may be null.
private final TTCN3Template initialValue;
// The restriction (omit/value/present/...) placed on the template.
private final TemplateRestriction.Restriction_type templateRestriction;
// Set via setWritten() when the variable is assigned somewhere; checked
// in postCheck() to warn about never-written variables.
private boolean wasAssigned;
/**
 * Creates a new template variable definition.
 *
 * @param templateRestriction
 *                the restriction placed on the template.
 * @param identifier
 *                the name of the definition.
 * @param type
 *                the declared type; may be null.
 * @param aFormalParList
 *                the formal parameter list; not yet supported, check()
 *                reports a semantic error if it is not null.
 * @param initialValue
 *                the initial value; may be null.
 * */
public Def_Var_Template( final TemplateRestriction.Restriction_type templateRestriction,
        final Identifier identifier,
        final Type type,
        final FormalParameterList aFormalParList,
        final TTCN3Template initialValue) {
    super(identifier);
    this.templateRestriction = templateRestriction;
    this.type = type;
    mFormalParList = aFormalParList;
    this.initialValue = initialValue;
    if (type != null) {
        type.setFullNameParent(this);
    }
    if (initialValue != null) {
        initialValue.setFullNameParent(this);
    }
}
@Override
public Assignment_type getAssignmentType() {
    return Assignment_type.A_VAR_TEMPLATE;
}

@Override
public StringBuilder getFullName(final INamedNode child) {
    final StringBuilder builder = super.getFullName(child);
    if (type == child) {
        builder.append(FULLNAMEPART1);
    } else if (initialValue == child) {
        builder.append(FULLNAMEPART2);
    }
    return builder;
}

@Override
public String getAssignmentName() {
    return "template variable";
}

@Override
public String getDescription() {
    final StringBuilder builder = new StringBuilder(getAssignmentName());
    builder.append(" `");
    if (isLocal()) {
        builder.append(identifier.getDisplayName());
    } else {
        builder.append(getFullName());
    }
    builder.append('\'');
    return builder.toString();
}

@Override
public String getOutlineIcon() {
    return "template_dynamic.gif";
}

@Override
public int category() {
    final int base = super.category();
    return type == null ? base : base + type.category();
}

@Override
public void setMyScope(final Scope scope) {
    super.setMyScope(scope);
    if (type != null) {
        type.setMyScope(scope);
    }
    if (initialValue != null) {
        initialValue.setMyScope(scope);
    }
}

@Override
public Type getType(final CompilationTimeStamp timestamp) {
    check(timestamp);
    return type;
}

@Override
public void check(final CompilationTimeStamp timestamp) {
    check(timestamp, null);
}
/**
 * Performs the semantic check of this definition: naming conventions, the
 * declared type, the initial value against the type, the template
 * restriction, and reports the not-yet-supported formal parameter list.
 */
@Override
public void check(final CompilationTimeStamp timestamp, IReferenceChain refChain) {
    if (lastTimeChecked != null && !lastTimeChecked.isLess(timestamp)) {
        return; // already checked in this compilation round
    }
    isUsed = false;
    wasAssigned = false;
    NamingConventionHelper.checkConvention(PreferenceConstants.REPORTNAMINGCONVENTION_LOCAL_VARTEMPLATE, identifier, this);
    NamingConventionHelper.checkNameContents(identifier, getMyScope().getModuleScope().getIdentifier(), getDescription());
    if (type == null) {
        lastTimeChecked = timestamp;
        return;
    }
    type.check(timestamp);
    // note: the definition is marked checked before the initial value is
    // checked below
    lastTimeChecked = timestamp;
    if (initialValue == null) {
        return;
    }
    IType lastType = type.getTypeRefdLast(timestamp);
    switch (lastType.getTypetype()) {
    case TYPE_PORT:
        // port types carry no values, so no template variable can exist
        location.reportSemanticError(MessageFormat.format(PORTNOTALLOWED, lastType.getFullName()));
        break;
    default:
        break;
    }
    TTCN3Template realInitialValue = initialValue;
    initialValue.setMyGovernor(type);
    // Needed in case of universal charstring templates
    if (initialValue.getTemplatetype() == Template_type.CSTR_PATTERN && lastType.getTypetype() == Type.Type_type.TYPE_UCHARSTRING) {
        realInitialValue = initialValue.setTemplatetype(timestamp, Template_type.USTR_PATTERN);
        // FIXME implement setting the pattern type, once
        // universal charstring pattern are supported.
    }
    ITTCN3Template temporalValue = type.checkThisTemplateRef(timestamp, realInitialValue);
    temporalValue.checkThisTemplateGeneric(timestamp, type, true, true, true, true, false);
    TemplateRestriction.check(timestamp, this, initialValue, null);
    // Only to follow the pattern, otherwise no such field can exist
    // here
    if (withAttributesPath != null) {
        withAttributesPath.checkGlobalAttributes(timestamp, false);
        withAttributesPath.checkAttributes(timestamp);
    }
    if ( mFormalParList != null ) {
        mFormalParList.reset();
        mFormalParList.check(timestamp, getAssignmentType());
        // template variable is always local
        location.reportSemanticError(MessageFormat.format(PARAMETRIZED_LOCAL_TEMPLATE_VAR, getFullName()));
    }
}
@Override
public void postCheck() {
	super.postCheck();
	if (wasAssigned) {
		return;
	}
	// The variable is never written: report a problem whose severity
	// is taken from the designer preferences (defaults to a warning).
	final String severity = Platform.getPreferencesService().getString(ProductConstants.PRODUCT_ID_DESIGNER,
			PreferenceConstants.REPORTREADONLY, GeneralConstants.WARNING, null);
	location.reportConfigurableSemanticProblem(severity,
			MessageFormat.format("The {0} seems to be never written, maybe it could be a template", getDescription()));
}
/** @return the initial value of this template variable, if one was given. */
public TTCN3Template getInitialValue() {
	return initialValue;
}
/**
 * Indicates that this variable template was used in a way where its
 * value can be changed.
 * */
public void setWritten() {
	// Consumed by postCheck(): a never-written variable template
	// triggers a configurable "could be a template" problem report.
	wasAssigned = true;
}
@Override
public boolean checkIdentical(final CompilationTimeStamp timestamp, final Definition definition) {
	// Ensure both definitions are semantically checked before comparing.
	check(timestamp);
	definition.check(timestamp);
	// The definition inherited from the component type must also be a template variable.
	if (!Assignment_type.A_VAR_TEMPLATE.equals(definition.getAssignmentType())) {
		location.reportSemanticError(MessageFormat
				.format("Local definition `{0}'' is a template variable, but the definition inherited from component type `{1}'' is a {2}",
						identifier.getDisplayName(), definition.getMyScope().getFullName(),
						definition.getAssignmentName()));
		return false;
	}
	Def_Var_Template otherVariable = (Def_Var_Template) definition;
	// The two types must be identical; mismatch is an error.
	if (!type.isIdentical(timestamp, otherVariable.type)) {
		final String message = MessageFormat
				.format("Local template variable `{0}'' has type `{1}'', but the template variable inherited from component type `{2}'' has type `{3}''",
						identifier.getDisplayName(), type.getTypename(), otherVariable.getMyScope().getFullName(),
						otherVariable.type.getTypename());
		type.getLocation().reportSemanticError(message);
		return false;
	}
	// A difference only in the presence of an initial value is just a warning.
	if (initialValue != null) {
		if (otherVariable.initialValue == null) {
			initialValue.getLocation()
					.reportSemanticWarning(
							MessageFormat.format(
									"Local template variable `{0}'' has initial value, but the template variable inherited from component type `{1}'' does not",
									identifier.getDisplayName(), otherVariable.getMyScope().getFullName()));
		}
	} else if (otherVariable.initialValue != null) {
		location.reportSemanticWarning(MessageFormat
				.format("Local template variable `{0}'' does not have initial value, but the template variable inherited from component type `{1}'' has",
						identifier.getDisplayName(), otherVariable.getMyScope().getFullName()));
	}
	return true;
}
@Override
public String getProposalKind() {
	// Kind text for content assist: the type's description (when known)
	// followed by the generic KIND suffix.
	final StringBuilder kind = new StringBuilder();
	if (type != null) {
		type.getProposalDescription(kind);
	}
	return kind.append(KIND).toString();
}
@Override
public void addProposal(final ProposalCollector propCollector, final int i) {
	final List<ISubReference> subrefs = propCollector.getReference().getSubreferences();
	if (subrefs.size() <= i) {
		return;
	}
	final String subrefName = subrefs.get(i).getId().getName();
	if (subrefs.size() == i + 1 && identifier.getName().toLowerCase().startsWith(subrefName.toLowerCase())) {
		// Case-insensitive prefix match on the final sub-reference:
		// offer this definition itself.
		super.addProposal(propCollector, i);
	}
	if (subrefs.size() > i + 1 && type != null && identifier.getName().equals(subrefName)) {
		// Perfect match with more sub-references remaining:
		// let the type continue the completion.
		type.addProposal(propCollector, i + 1);
	}
}
@Override
public void addDeclaration(final DeclarationCollector declarationCollector, final int i) {
	final List<ISubReference> subrefs = declarationCollector.getReference().getSubreferences();
	// Only act on an exact name match at position i.
	if (subrefs.size() <= i || !identifier.getName().equals(subrefs.get(i).getId().getName())) {
		return;
	}
	if (subrefs.size() > i + 1 && type != null) {
		// More sub-references follow: the type resolves the remainder.
		type.addDeclaration(declarationCollector, i + 1);
	} else if (subrefs.size() == i + 1 && Subreference_type.fieldSubReference.equals(subrefs.get(i).getReferenceType())) {
		declarationCollector.addDeclaration(this);
	}
}
@Override
public TemplateRestriction.Restriction_type getTemplateRestriction() {
	// The template restriction parsed for this definition.
	return templateRestriction;
}
@Override
public List<Integer> getPossibleExtensionStarterTokens() {
	final List<Integer> tokens = super.getPossibleExtensionStarterTokens();
	// Without an initial value the definition can still be extended
	// with an assignment (`:=').
	if (initialValue == null) {
		tokens.add(Ttcn3Lexer.ASSIGNMENTCHAR);
	}
	return tokens;
}
@Override
public void updateSyntax(final TTCN3ReparseUpdater reparser, final boolean isDamaged) throws ReParseException {
	if (isDamaged) {
		// The damage intersects this definition: force a re-check and try to
		// envelop the damaged region inside one of our parts.
		lastTimeChecked = null;
		boolean enveloped = false;
		Location temporalIdentifier = identifier.getLocation();
		if (reparser.envelopsDamage(temporalIdentifier) || reparser.isExtending(temporalIdentifier)) {
			// The identifier itself was damaged: re-parse it in place.
			reparser.extendDamagedRegion(temporalIdentifier);
			IIdentifierReparser r = new IdentifierReparser(reparser);
			int result = r.parseAndSetNameChanged();
			identifier = r.getIdentifier();
			// damage handled
			if (result == 0 && identifier != null) {
				enveloped = true;
			} else {
				throw new ReParseException(result);
			}
		}
		// For each owned part: if the damage is already enveloped only shift
		// its location, otherwise re-parse the part that envelops the damage.
		if (type != null) {
			if (enveloped) {
				type.updateSyntax(reparser, false);
				reparser.updateLocation(type.getLocation());
			} else if (reparser.envelopsDamage(type.getLocation())) {
				type.updateSyntax(reparser, true);
				enveloped = true;
				reparser.updateLocation(type.getLocation());
			}
		}
		if (initialValue != null) {
			if (enveloped) {
				initialValue.updateSyntax(reparser, false);
				reparser.updateLocation(initialValue.getLocation());
			} else if (reparser.envelopsDamage(initialValue.getLocation())) {
				initialValue.updateSyntax(reparser, true);
				enveloped = true;
				reparser.updateLocation(initialValue.getLocation());
			}
		}
		if (withAttributesPath != null) {
			if (enveloped) {
				withAttributesPath.updateSyntax(reparser, false);
				reparser.updateLocation(withAttributesPath.getLocation());
			} else if (reparser.envelopsDamage(withAttributesPath.getLocation())) {
				withAttributesPath.updateSyntax(reparser, true);
				enveloped = true;
				reparser.updateLocation(withAttributesPath.getLocation());
			}
		}
		// No part could envelop the damage: the caller must re-parse a wider region.
		if (!enveloped) {
			throw new ReParseException();
		}
		return;
	}
	// Undamaged path: only shift the locations of all parts.
	reparser.updateLocation(identifier.getLocation());
	if (type != null) {
		type.updateSyntax(reparser, false);
		reparser.updateLocation(type.getLocation());
	}
	if (initialValue != null) {
		initialValue.updateSyntax(reparser, false);
		reparser.updateLocation(initialValue.getLocation());
	}
	if (withAttributesPath != null) {
		withAttributesPath.updateSyntax(reparser, false);
		reparser.updateLocation(withAttributesPath.getLocation());
	}
}
@Override
public void findReferences(final ReferenceFinder referenceFinder, final List<Hit> foundIdentifiers) {
	// Collect hits from the definition itself, then from its type and initial value.
	super.findReferences(referenceFinder, foundIdentifiers);
	if (type != null) {
		type.findReferences(referenceFinder, foundIdentifiers);
	}
	if (initialValue != null) {
		initialValue.findReferences(referenceFinder, foundIdentifiers);
	}
}
@Override
protected boolean memberAccept(final ASTVisitor v) {
	// Visit the definition itself, then its type and initial value (when present);
	// stop as soon as any visit asks to abort.
	return super.memberAccept(v)
			&& (type == null || type.accept(v))
			&& (initialValue == null || initialValue.accept(v));
}
/** @return true when the variable template was marked as written (see setWritten). */
public boolean getWritten() {
	return wasAssigned;
}
} |
package it.unitn.vanguard.reminiscence;
import it.unitn.vanguard.reminiscence.utils.FinalFunctionsUtilities;
import java.util.Calendar;
import java.util.Locale;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
import android.widget.TextView;
import android.widget.Toast;
public class DataNascitaActivity extends Activity {
private Context context;
private String day, month, year;
// Date values
private int dayValue = 1, monthValue = 0, yearValue = 1940;
private int maxYear, maxDay, maxMonth;
// Day views
private Button btnDayUp, btnDayDown;
private TextView txtDay;
// Month Views
private Button btnMonthUp, btnMonthDown;
private TextView txtMonth;
// Year Views
private Button btnYearUp, btnYearDown;
private TextView txtYear;
// Back - Confirm Buttons
private Button btnBack, arrowBackBtn;
private Button btnConfirm, arrowConfirmBtn;
private String[] mesi = {"Gennaio","Febbraio","Marzo","Aprile","Maggio","Giugno",
"Luglio","Agosto","Settembre","Ottobre","Novembre","Dicembre"};
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
context = getApplicationContext();
String language = FinalFunctionsUtilities.getSharedPreferences("language", context);
FinalFunctionsUtilities.switchLanguage(new Locale(language), context);
setContentView(R.layout.activity_data_nascita);
initializeButtons();
initializeVars();
initializeListeners();
//getting current day, month and year
Calendar rightNow = Calendar.getInstance();
maxYear = rightNow.get(Calendar.YEAR);
maxMonth = rightNow.get(Calendar.MONTH);
maxDay = rightNow.get(Calendar.DATE);
}
private void initializeVars() {
Context context = getApplicationContext();
day = FinalFunctionsUtilities.getSharedPreferences("day", context);
month = FinalFunctionsUtilities.getSharedPreferences("month", context);
year = FinalFunctionsUtilities.getSharedPreferences("year", context);
//inizializza le variabili
dayValue = Integer.parseInt(day);
yearValue = Integer.parseInt(year);
for(int i = 0; i < mesi.length; i++){
if(mesi[i].equals(month)){
monthValue = i;
}
}
//setta i campi di testo
txtDay.setText(day);
txtMonth.setText(month);
txtYear.setText(year);
}
private void initializeButtons() {
//add day buttons
btnDayUp = (Button) findViewById(R.id.btn_day_up);
btnDayDown = (Button) findViewById(R.id.btn_day_down);
txtDay = (TextView) findViewById(R.id.txt_day);
//add month buttons
btnMonthUp = (Button) findViewById(R.id.btn_month_up);
btnMonthDown = (Button) findViewById(R.id.btn_month_down);
txtMonth = (TextView) findViewById(R.id.txt_month);
//add year buttons
btnYearUp = (Button) findViewById(R.id.btn_year_up);
btnYearDown = (Button) findViewById(R.id.btn_year_down);
txtYear = (TextView) findViewById(R.id.txt_year);
btnBack = (Button) findViewById(R.id.datanascita_back_btn);
btnConfirm = (Button) findViewById(R.id.datanascita_confirm_btn);
arrowConfirmBtn = (Button) findViewById(R.id.datanascita_arrow_confirm_btn);
arrowBackBtn = (Button) findViewById(R.id.datanascita_arrow_back_btn);
}
private void initializeListeners() {
// DAY UP-DOWN EVENTS
btnDayUp.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
dayValue
dayValue = FinalFunctionsUtilities.valiDate(dayValue, monthValue, yearValue);
if(FinalFunctionsUtilities.isOverCurrentDate(dayValue, monthValue, yearValue, maxDay, maxMonth, maxYear)){
currentDateMsg();
}
else { txtDay.setText(String.valueOf(dayValue)); }
}
});
btnDayDown.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
dayValue++;
dayValue = FinalFunctionsUtilities.valiDate(dayValue, monthValue, yearValue);
if(FinalFunctionsUtilities.isOverCurrentDate(dayValue, monthValue, yearValue, maxDay, maxMonth, maxYear)){
currentDateMsg();
}
else { txtDay.setText(String.valueOf(dayValue)); }
}
});
// MONTH UP-DOWN EVENTS
btnMonthUp.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
if(monthValue == 0) {
monthValue = 11;
}
else {
monthValue
}
if(FinalFunctionsUtilities.isOverCurrentDate(dayValue, monthValue, yearValue, maxDay, maxMonth, maxYear)){
currentDateMsg();
}
else {
dayValue = FinalFunctionsUtilities.valiDate(dayValue, monthValue, yearValue);
txtMonth.setText(mesi[monthValue]);
txtDay.setText(String.valueOf(dayValue));
}
}
});
btnMonthDown.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
if(monthValue == 11) {
monthValue = 0;
}
else {
monthValue++;
}
if(FinalFunctionsUtilities.isOverCurrentDate(dayValue, monthValue, yearValue, maxDay, maxMonth, maxYear)){
currentDateMsg();
}
else{
dayValue = FinalFunctionsUtilities.valiDate(dayValue, monthValue, yearValue);
txtMonth.setText(mesi[monthValue]);
txtDay.setText(String.valueOf(dayValue));
}
}
});
// YEAR UP-DOWN EVENTS
btnYearUp.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
yearValue++;
if(FinalFunctionsUtilities.isOverCurrentDate(dayValue, monthValue, yearValue, maxDay, maxMonth, maxYear)){
yearValue
currentDateMsg();
}
else{
txtYear.setText(String.valueOf(yearValue));
}
dayValue = FinalFunctionsUtilities.valiDate(dayValue, monthValue, yearValue);
txtDay.setText(String.valueOf(dayValue));
}
});
btnYearDown.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
yearValue
if(yearValue == maxYear-120) {
txtYear.setText(String.valueOf(yearValue = maxYear-120));
}
else {
txtYear.setText(String.valueOf(yearValue));
}
dayValue = FinalFunctionsUtilities.valiDate(dayValue, monthValue, yearValue);
txtDay.setText(String.valueOf(dayValue));
}
});
OnClickListener onclickback = new View.OnClickListener() {
@Override
public void onClick(View v) {
Intent intent = new Intent(v.getContext(), RegistrationActivity.class);
startActivityForResult(intent, 0);
overridePendingTransition(R.anim.slide_in_left, R.anim.slide_out_right);
finish();
}
};
OnClickListener onclickconfirm = new View.OnClickListener() {
@Override
public void onClick(View v) {
Intent passwordIntent = new Intent(v.getContext(), PasswordActivity.class);
// Get shared preferences
Context context = getApplicationContext();
FinalFunctionsUtilities.setSharedPreferences("day", txtDay.getText().toString(), context);
FinalFunctionsUtilities.setSharedPreferences("month", txtMonth.getText().toString(), context);
FinalFunctionsUtilities.setSharedPreferences("year", txtYear.getText().toString(), context);
startActivityForResult(passwordIntent, 0);
overridePendingTransition(R.anim.slide_in_right, R.anim.slide_out_left);
finish();
}
};
//Button back
btnBack.setOnClickListener(onclickback);
arrowBackBtn.setOnClickListener(onclickback);
// Button confirm
btnConfirm.setOnClickListener(onclickconfirm);
arrowConfirmBtn.setOnClickListener(onclickconfirm);
}
//control on current date (easter egg toast)
void currentDateMsg(){
Context context = getApplicationContext();
CharSequence text = "Davvero? Sei nato nel futuro?";
Toast toast = Toast.makeText(context, text, Toast.LENGTH_SHORT);
toast.show();
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.login, menu);
String language = FinalFunctionsUtilities.getSharedPreferences("language", getApplicationContext());
Locale locale = new Locale(language);
if(locale.toString().equals(Locale.ITALIAN.getLanguage()) || locale.toString().equals(locale.ITALY.getLanguage())) {
menu.getItem(0).setIcon(R.drawable.it);
}
else if(locale.toString().equals(Locale.ENGLISH.getLanguage())) {
menu.getItem(0).setIcon(R.drawable.en);
}
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
Locale locale = null;
switch (item.getItemId()) {
case R.id.action_language_it: { locale = Locale.ITALY; break; }
case R.id.action_language_en: { locale = Locale.ENGLISH; break; }
}
if(locale != null && FinalFunctionsUtilities.switchLanguage(locale, context)) {
// Refresh activity
finish();
startActivity(getIntent());
}
return true;
}
} |
package org.metaborg.runtime.task.evaluation;
import java.util.Map;
import java.util.Queue;
import java.util.Set;
import org.metaborg.runtime.task.ITaskEngine;
import org.metaborg.runtime.task.Task;
import org.metaborg.runtime.task.collection.BidirectionalLinkedHashMultimap;
import org.metaborg.runtime.task.collection.BidirectionalSetMultimap;
import org.spoofax.interpreter.core.IContext;
import org.spoofax.interpreter.core.Tools;
import org.spoofax.interpreter.stratego.Strategy;
import org.spoofax.interpreter.terms.IStrategoAppl;
import org.spoofax.interpreter.terms.IStrategoConstructor;
import org.spoofax.interpreter.terms.IStrategoList;
import org.spoofax.interpreter.terms.IStrategoTerm;
import org.spoofax.interpreter.terms.IStrategoTuple;
import org.spoofax.interpreter.terms.ITermFactory;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
/**
 * Frontend that evaluates scheduled tasks: tasks are queued once all their
 * dependencies are solved, dispatched to a constructor-specific evaluator
 * when one is registered, and to the default evaluator otherwise.
 */
public class TaskEvaluationQueue implements ITaskEvaluationQueue, ITaskEvaluationFrontend {
	private final ITaskEngine taskEngine;
	private final ITermFactory factory;

	/** Queue of task that are scheduled for evaluation. */
	private final Queue<IStrategoTerm> evaluationQueue = Lists.newLinkedList();

	/** Set of tasks in the queue. **/
	private final Set<IStrategoTerm> queued = Sets.newHashSet();

	/** Dependencies of tasks which are updated during evaluation. */
	private final BidirectionalSetMultimap<IStrategoTerm, IStrategoTerm> runtimeDependencies =
		BidirectionalLinkedHashMultimap.create();

	/** Maps the constructor of a task to the evaluator that can evaluate the task. */
	private final Map<IStrategoConstructor, ITaskEvaluator> taskEvaluators = Maps.newLinkedHashMap();

	/** The default task evaluator that is used to evaluate tasks for which there is no specific evaluator. */
	private final ITaskEvaluator defaultTaskEvaluator;

	// Per-run state: set in evaluate(), cleared again in reset().
	private Set<IStrategoTerm> scheduled;
	private final Set<IStrategoTerm> skipped = Sets.newHashSet();
	private final Set<IStrategoTerm> evaluated = Sets.newHashSet();

	public TaskEvaluationQueue(ITaskEngine taskEngine, ITermFactory factory, ITaskEvaluator defaultTaskEvaluator) {
		this.taskEngine = taskEngine;
		this.factory = factory;
		this.defaultTaskEvaluator = defaultTaskEvaluator;
	}

	/** Adds the task to the evaluation queue unless it is already queued. */
	public void queue(IStrategoTerm taskID) {
		if(!queued.contains(taskID)) {
			evaluationQueue.add(taskID);
			queued.add(taskID);
		}
	}

	/**
	 * Queues the task if all its dependencies are solved; otherwise records
	 * the unsolved dependencies so that taskSolved() can queue it later.
	 */
	public void queueOrDefer(IStrategoTerm taskID) {
		final Iterable<IStrategoTerm> dependencies = taskEngine.getDependencies(taskID);
		final Set<IStrategoTerm> dependenciesSet = Sets.newHashSet(dependencies);
		// TODO: this could be done in constant time if task engine keeps a set of solved tasks.
		for(final IStrategoTerm dependency : dependencies) {
			if(taskEngine.getTask(dependency).solved()) {
				dependenciesSet.remove(dependency);
			}
		}
		if(dependenciesSet.isEmpty()) {
			// If the task has no unsolved dependencies, queue it for analysis.
			queue(taskID);
		} else {
			// Fill toRuntimeDependency for scheduled tasks such that solving the task activates their dependent tasks.
			runtimeDependencies.putAll(taskID, dependenciesSet);
		}
	}

	/** Called when a task is solved: queues dependents whose last dependency this was. */
	public void taskSolved(IStrategoTerm taskID) {
		// Retrieve dependent tasks of the solved task.
		final Set<IStrategoTerm> dependents = Sets.newHashSet(taskEngine.getDependent(taskID));
		dependents.addAll(runtimeDependencies.getInverse(taskID));
		for(final IStrategoTerm dependentTaskID : dependents) {
			// Remove the dependency to the solved task. If that was the last dependency, schedule the task.
			final boolean removed = runtimeDependencies.remove(dependentTaskID, taskID);
			if(removed && runtimeDependencies.get(dependentTaskID).size() == 0
				&& !taskEngine.getTask(dependentTaskID).solved())
				queue(dependentTaskID);
		}
	}

	/** Marks the task as skipped and removes it from the scheduled set. */
	public void taskSkipped(IStrategoTerm taskID) {
		scheduled.remove(taskID);
		skipped.add(taskID);
	}

	/** Replaces the runtime dependencies of a delayed task with the given list. */
	public void taskDelayed(IStrategoTerm taskID, IStrategoList dependencies) {
		TaskEvaluationDebugging.debugDelayedDependecy(taskEngine, taskID, dependencies);
		// Sets the runtime dependencies for a task to the given dependency list.
		runtimeDependencies.removeAll(taskID);
		for(final IStrategoTerm dependency : dependencies)
			runtimeDependencies.put(taskID, dependency);
	}

	public void addRuntimeDependency(IStrategoTerm taskID, IStrategoTerm dependencyTaskID) {
		runtimeDependencies.put(taskID, dependencyTaskID);
	}

	public void removeRuntimeDependency(IStrategoTerm taskID, IStrategoTerm dependencyTaskID) {
		runtimeDependencies.remove(taskID, dependencyTaskID);
	}

	/** Registers an evaluator for tasks built with the given constructor; duplicates are rejected. */
	public void addTaskEvaluator(IStrategoConstructor constructor, ITaskEvaluator taskEvaluator) {
		if(taskEvaluators.put(constructor, taskEvaluator) != null) {
			throw new RuntimeException("Task evaluator for " + constructor + " already exists.");
		}
	}

	/**
	 * Evaluates all scheduled tasks and returns a tuple of
	 * (evaluated, skipped, unevaluated) task-identifier lists.
	 * State is always reset afterwards, even on failure.
	 */
	public IStrategoTuple evaluate(Set<IStrategoTerm> scheduled, IContext context, Strategy insert, Strategy perform) {
		try {
			this.scheduled = scheduled;
			// Queue tasks and evaluate them for each specific task evaluator.
			for(ITaskEvaluator taskEvaluator : taskEvaluators.values()) {
				taskEvaluator.queue(taskEngine, this, this.scheduled);
				evaluateQueuedTasks(context, insert, perform);
			}
			// Evaluate the remaining tasks with the default task evaluator.
			defaultTaskEvaluator.queue(taskEngine, this, this.scheduled);
			evaluateQueuedTasks(context, insert, perform);
			// Debug unevaluated tasks if debugging is enabled.
			TaskEvaluationDebugging.debugUnevaluated(taskEngine, this.scheduled, runtimeDependencies);
			// Return evaluated, skipped and unevaluated task identifiers.
			return factory.makeTuple(factory.makeList(evaluated), factory.makeList(skipped),
				factory.makeList(this.scheduled));
		} finally {
			reset();
		}
	}

	/** Clears all per-run state, including that of the registered evaluators. */
	public void reset() {
		evaluationQueue.clear();
		queued.clear();
		runtimeDependencies.clear();
		scheduled = null;
		skipped.clear();
		evaluated.clear();
		for(ITaskEvaluator evaluator : taskEvaluators.values())
			evaluator.reset();
		defaultTaskEvaluator.reset();
	}

	/**
	 * Evaluates queued tasks and updates the scheduled and evaluated sets.
	 */
	private void evaluateQueuedTasks(IContext context, Strategy insert, Strategy perform) {
		// Evaluate each task in the queue.
		for(IStrategoTerm taskID; (taskID = evaluationQueue.poll()) != null;) {
			final Task task = taskEngine.getTask(taskID);
			evaluated.add(taskID);
			scheduled.remove(taskID);
			queued.remove(taskID);
			// Clean up data for this task again, since a task may be scheduled multiple times. A re-schedule should
			// overwrite previous data.
			taskEngine.invalidate(taskID);
			evaluateTask(taskID, task, context, insert, perform);
		}
	}

	/**
	 * Evaluates given task using a specific or default task evaluator.
	 */
	private void evaluateTask(IStrategoTerm taskID, Task task, IContext context, Strategy insert, Strategy perform) {
		ITaskEvaluator taskEvaluator;
		final IStrategoTerm instruction = task.instruction;
		if(!Tools.isTermAppl(instruction)) {
			taskEvaluator = defaultTaskEvaluator;
		} else {
			// Dispatch on the instruction's constructor; fall back to the default evaluator.
			taskEvaluator = taskEvaluators.get(((IStrategoAppl)instruction).getConstructor());
			if(taskEvaluator == null)
				taskEvaluator = defaultTaskEvaluator;
		}
		taskEvaluator.evaluate(taskID, task, taskEngine, this, context, insert, perform);
	}
}
package swift.indigo;
import java.util.LinkedList;
import java.util.Map;
import java.util.PriorityQueue;
import java.util.Queue;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.logging.ConsoleHandler;
import java.util.logging.FileHandler;
import java.util.logging.Level;
import java.util.logging.LogRecord;
import java.util.logging.Logger;
import swift.application.test.TestsUtil;
import swift.clocks.Timestamp;
import swift.indigo.proto.AcquireResourcesReply;
import swift.indigo.proto.AcquireResourcesReply.AcquireReply;
import swift.indigo.proto.AcquireResourcesRequest;
import swift.indigo.proto.ReleaseResourcesRequest;
import swift.indigo.proto.TransferResourcesRequest;
import swift.utils.LogSiteFormatter;
import sys.net.api.Endpoint;
import sys.net.api.Envelope;
import sys.net.api.Service;
import sys.utils.Args;
import sys.utils.ConcurrentHashSet;
import sys.utils.Profiler;
import sys.utils.Threading;
/**
 * Node-side handler for the resource reservation protocol. Incoming
 * acquire/release/transfer messages are de-duplicated and put on queues;
 * two background threads dispatch queued requests to a worker pool and
 * forward outgoing transfer requests to other endpoints.
 */
public class ResourceManagerNode implements ReservationsProtocolHandler {

	// Suppress duplicate outgoing transfers for the same key within this window (ms).
	static final int DUPLICATE_TRANSFER_FILTER_WINDOW = 100;

	protected static final long DEFAULT_QUEUE_PROCESSING_WAIT_TIME = 1;
	private static final int DEFAULT_REQUEST_TRANSFER_RATIO = 3;
	private static final int nWorkers = 10;

	private IndigoResourceManager manager;

	// Incoming requests (FIFO order).
	private Queue<IndigoOperation> incomingRequestsQueue;
	// Incoming transfer requests, ordered by priority.
	private transient PriorityQueue<TransferResourcesRequest> transferRequestsQueue;

	private boolean active;
	private Service stub;
	// Replies cached by client timestamp, so repeated acquires can be answered from cache.
	private Map<Timestamp, AcquireResourcesReply> replies = new ConcurrentHashMap<Timestamp, AcquireResourcesReply>();
	private IndigoSequencerAndResourceManager sequencer;
	// Captured for use inside the dispatcher lambdas.
	private ResourceManagerNode thisManager = this;
	// Requests currently enqueued or being processed; used for duplicate filtering.
	private Set<IndigoOperation> waitingIndex;
	private Map<Timestamp, IndigoOperation> alreadyProcessedTransfers;
	private ExecutorService workers;

	private static Profiler profiler;
	private static String profilerName = "ManagerProfile";
	private Logger logger;

	public ResourceManagerNode(IndigoSequencerAndResourceManager sequencer, Endpoint surrogate, final Map<String, Endpoint> endpoints) {
		// Outgoing transfers queue
		Queue<TransferResourcesRequest> outgoingMessages = new LinkedList<>();
		// Incoming transfer requests are ordered by priority: promotes
		// exclusive_lock operations
		// and messages with smaller requests (size of requests was not tested)
		this.transferRequestsQueue = new PriorityQueue<TransferResourcesRequest>();
		// Incoming messages are ordered by FIFO order
		this.incomingRequestsQueue = new ConcurrentLinkedQueue<IndigoOperation>();
		this.waitingIndex = new ConcurrentHashSet<IndigoOperation>();
		this.alreadyProcessedTransfers = new ConcurrentHashMap<Timestamp, IndigoOperation>();
		this.workers = Executors.newFixedThreadPool(nWorkers);
		this.manager = new IndigoResourceManager(sequencer, surrogate, endpoints, outgoingMessages);
		this.stub = sequencer.stub;
		this.sequencer = sequencer;
		this.active = true;
		initLogging();
		final TransferFirstMessageBalacing messageBalancing = new TransferFirstMessageBalacing(incomingRequestsQueue, transferRequestsQueue);
		ConcurrentHashMap<String, Long> recentTransfers = new ConcurrentHashMap<>();
		// Incoming requests processor thread: takes the next balanced operation
		// and hands it to the worker pool.
		new Thread(() -> {
			while (active) {
				IndigoOperation request = messageBalancing.nextOp();
				if (request != null)
					workers.execute(() -> {
						request.deliverTo(thisManager);
					});
				else
					Threading.sleep(DEFAULT_QUEUE_PROCESSING_WAIT_TIME);
			}
		}).start();
		// Outgoing transfers thread: sends queued transfer requests, dropping
		// duplicates for the same key inside the filter window.
		new Thread(() -> {
			while (active) {
				TransferResourcesRequest request;
				synchronized (outgoingMessages) {
					while (outgoingMessages.isEmpty())
						Threading.waitOn(outgoingMessages, 10 * DEFAULT_QUEUE_PROCESSING_WAIT_TIME);
					request = outgoingMessages.poll();
				}
				if (request != null) {
					String key = request.key();
					long now = System.currentTimeMillis();
					Long ts = recentTransfers.get(key);
					if (ts == null || (now - ts) > DUPLICATE_TRANSFER_FILTER_WINDOW) {
						recentTransfers.put(key, now);
						Endpoint endpoint = endpoints.get(request.getDestination());
						stub.send(endpoint, request);
					}
				}
			}
		}).start();
	}

	/** Processes a transfer request by delegating to the resource manager. */
	public void process(TransferResourcesRequest request) {
		if (logger.isLoggable(Level.INFO)) {
			logger.info("Processing TransferResourcesRequest: " + request);
		}
		TRANSFER_STATUS reply = manager.transferResources(request);
		// Never keep reply
		// if (reply.hasTransferred()) {
		// alreadyProcessedTransfers.put(request.getClientTs(), request);
		waitingIndex.remove(request);
		if (logger.isLoggable(Level.INFO)) {
			logger.info("Finished TransferResourcesRequest: " + request + " Reply: " + reply);
		}
	}

	/** Releases the resources acquired for the matching (cached) acquire reply. */
	public void process(ReleaseResourcesRequest request) {
		if (logger.isLoggable(Level.INFO)) {
			logger.info("Processing ReleaseResourcesRequest " + request);
		}
		long opId = profiler.startOp(profilerName, "release");
		Timestamp ts = request.getClientTs();
		AcquireResourcesReply arr = replies.get(ts);
		if (arr != null && !arr.isReleased()) {
			if (arr.acquiredResources()) {
				manager.releaseResources(arr);
				arr.setReleased();
				waitingIndex.remove(request);
			} else {
				// A release without a matching successful acquire indicates a protocol
				// violation; the node aborts (pre-existing behavior).
				if (logger.isLoggable(Level.WARNING))
					logger.warning("Trying to release but did not get resources: exiting, should not happen " + request);
				System.exit(0);
			}
			if (logger.isLoggable(Level.INFO))
				logger.info("Finished ReleaseResourcesRequest" + request);
		}
		profiler.endOp(profilerName, opId);
	}

	/** Processes an acquire request and replies on the stored connection. */
	public void processWithReply(Envelope conn, AcquireResourcesRequest request) {
		long opId = profiler.startOp(profilerName, "acquire");
		AcquireResourcesReply reply = null;
		if (logger.isLoggable(Level.INFO))
			logger.info("Processing AcquireResourcesRequest " + request);
		reply = manager.acquireResources(request);
		// Only successful acquires are cached: they must be found again at release time.
		if (reply.acquiredStatus().equals(AcquireReply.YES)) {
			replies.put(request.getClientTs(), reply);
		}
		if (logger.isLoggable(Level.INFO))
			logger.info("Finished AcquireResourcesRequest " + request + " Reply: " + reply + " " + reply.getSnapshot());
		waitingIndex.remove(request);
		profiler.endOp(profilerName, opId);
		conn.reply(reply);
	}

	/**
	 * Message handlers
	 */

	@Override
	public void onReceive(Envelope conn, AcquireResourcesRequest request) {
		request.setHandler(conn);
		AcquireResourcesReply reply = null;
		profiler.trackRequest(profilerName, request);
		if (request.getResources().size() == 0) {
			reply = new AcquireResourcesReply(AcquireReply.NO_RESOURCES, sequencer.clocks.currentClockCopy());
		} else {
			if (isDuplicate(request)) {
				if (logger.isLoggable(Level.INFO))
					logger.info("ignore duplicate request: " + request);
				// reply = replies.get(request.getClientTs());
			} else if (checkAcquireAlreadyProcessed(request) != null) {
				if (logger.isLoggable(Level.INFO))
					logger.info("Received an already processed message: " + request + " REPLY: " + replies.get(request.getClientTs()));
				reply = replies.get(request.getClientTs());
			} else {
				synchronized (incomingRequestsQueue) {
					incomingRequestsQueue.add(request);
				}
			}
		}
		if (reply != null)
			conn.reply(reply);
	}

	@Override
	public void onReceive(Envelope conn, TransferResourcesRequest request) {
		// if (!alreadyProcessedTransfers.containsKey(request.getClientTs())) {
		// Check if the message is duplicated
		if (!isDuplicate(request)) {
			synchronized (transferRequestsQueue) {
				transferRequestsQueue.add(request);
			}
		} else {
			logger.info("repeated message");
		}
		// } else {
		// logger.info("already processed request " + request);
	}

	@Override
	public void onReceive(Envelope conn, ReleaseResourcesRequest request) {
		// FIX: the replies map is keyed by Timestamp; the old lookup used the
		// request object itself and therefore always returned null, so the
		// "already released" short-circuit never fired (compare process()).
		AcquireResourcesReply reply = replies.get(request.getClientTs());
		if (reply == null || !reply.isReleased()) {
			if (!isDuplicate(request)) {
				incomingRequestsQueue.add(request);
			}
		}
	}

	@Override
	public void onReceive(Envelope conn, AcquireResourcesReply request) {
		if (logger.isLoggable(Level.WARNING))
			logger.warning("RPC " + request.getClass() + " not implemented!");
	}

	/**
	 * Private methods
	 */

	// If messages is already enqueued for processing ignore new request
	private boolean isDuplicate(IndigoOperation request) {
		if (!waitingIndex.add(request))
			return true;
		else
			return false;
	}

	/** Returns the cached reply for an already processed acquire, or null. */
	private AcquireResourcesReply checkAcquireAlreadyProcessed(AcquireResourcesRequest request) {
		AcquireResourcesReply reply = replies.get(request.getClientTs());
		if (reply != null && logger.isLoggable(Level.INFO))
			logger.info("Reply from cache: " + reply);
		if (reply != null)
			return reply;
		return null;
	}

	/** Sets up the per-site logger and, at FINEST level, a file-backed profiler log. */
	private void initLogging() {
		ConsoleHandler handler = new ConsoleHandler();
		handler.setFormatter(new LogSiteFormatter(sequencer.siteId));
		logger = Logger.getLogger(this.getClass().getName() + "." + sequencer.siteId);
		logger.setUseParentHandlers(false);
		logger.addHandler(handler);
		// Separate logger for the profiler output (renamed from "logger" to
		// avoid shadowing the instance field above).
		Logger profilerLogger = Logger.getLogger(profilerName);
		profiler = Profiler.getInstance();
		if (profilerLogger.isLoggable(Level.FINEST)) {
			FileHandler fileTxt;
			try {
				String resultsDir = Args.valueOf("-results_dir", ".");
				String siteId = Args.valueOf("-siteId", "GLOBAL");
				String suffix = Args.valueOf("-fileNameSuffix", "");
				fileTxt = new FileHandler(resultsDir + "/manager_profiler" + "_" + siteId + suffix + ".log");
				fileTxt.setFormatter(new java.util.logging.Formatter() {
					@Override
					public String format(LogRecord record) {
						return record.getMessage() + "\n";
					}
				});
				profilerLogger.addHandler(fileTxt);
				profiler.printMessage(profilerName, TestsUtil.dumpArgs());
			} catch (Exception e) {
				e.printStackTrace();
				System.exit(0);
			}
		}
		profiler.printHeaderWithCustomFields(profilerName);
	}
}
// Balances dispatch between resource requests and transfer operations,
// aiming for roughly "ratio" requests per transfer.
// NOTE(review): class name is misspelled ("Balacing" -> "Balancing");
// kept unchanged because external code references it by this name.
class SimpleMessageBalacing {
    enum OPType {
        TRANSFER, REQUEST
    }

    // Counters of operations dispatched since their last reset.
    private AtomicInteger transfers;
    private AtomicInteger requests;
    // Target number of requests dispatched per transfer.
    private final int ratio;
    // Queues are shared with the producer side; see nextOp() for locking notes.
    private Queue<IndigoOperation> requestQueue;
    private Queue<TransferResourcesRequest> transferQueue;

    public SimpleMessageBalacing(int requestTransferRatio, Queue<IndigoOperation> requestQueue, Queue<TransferResourcesRequest> transferQueue) {
        this.ratio = requestTransferRatio;
        this.requestQueue = requestQueue;
        this.transferQueue = transferQueue;
        this.requests = new AtomicInteger();
        this.transfers = new AtomicInteger();
    }

    // Records that an operation of the given type was dispatched.
    private void registerOp(OPType op) {
        int count;
        switch (op) {
            case TRANSFER :
                count = transfers.incrementAndGet();
                // NOTE(review): the transfer counter is reset unconditionally
                // (and "count" is unused), unlike REQUEST which resets only at
                // "ratio" -- so transfers.get() observed in nextOp() is
                // effectively always 0. Possibly a bug: confirm whether this
                // was meant to be "if (count == ratio) transfers.set(0);".
                transfers.set(0);
                break;
            case REQUEST :
                count = requests.incrementAndGet();
                if (count == ratio)
                    requests.set(0);
                break;
        }
    }

    // Picks the next operation to dispatch: serves requests while the
    // request/transfer imbalance is within "ratio" (or no transfer is
    // available), otherwise serves a transfer; returns null when both
    // queues are empty.
    public IndigoOperation nextOp() {
        int nRequests, nTransfers;
        // Snapshot both counters together so the comparison below is consistent.
        synchronized (this) {
            nRequests = requests.get();
            nTransfers = transfers.get();
        }
        // NOTE(review): requestQueue.remove() is not synchronized here while
        // transferQueue.remove() is -- confirm whether requestQueue has a
        // single consumer or also needs locking.
        if (requestQueue.size() > 0 && (nRequests - nTransfers <= ratio || transferQueue.size() == 0)) {
            registerOp(OPType.REQUEST);
            return requestQueue.remove();
        } else if (transferQueue.size() > 0) {
            registerOp(OPType.TRANSFER);
            synchronized (transferQueue) {
                return transferQueue.remove();
            }
        } else
            return null;
    }

    public String toString() {
        return "REQ: " + requestQueue + " TRANS: " + transferQueue;
    }
}
class TransferFirstMessageBalacing {
enum OPType {
TRANSFER, REQUEST
}
private Queue<IndigoOperation> requestQueue;
private Queue<TransferResourcesRequest> transferQueue;
public TransferFirstMessageBalacing(Queue<IndigoOperation> requestQueue, Queue<TransferResourcesRequest> transferQueue) {
this.requestQueue = requestQueue;
this.transferQueue = transferQueue;
}
public IndigoOperation nextOp() {
IndigoOperation op = null;
if (transferQueue.size() > 0) {
synchronized (transferQueue) {
op = transferQueue.remove();
}
} else if (requestQueue.size() > 0) {
synchronized (requestQueue) {
op = requestQueue.remove();
}
}
return op;
}
public String toString() {
return "REQ: " + requestQueue + " TRANS: " + transferQueue;
}
} |
package magpie.models.classification;
import java.util.List;
import magpie.models.interfaces.WekaModel;
import magpie.data.Dataset;
import magpie.user.CommandHandler;
import magpie.utility.WekaUtility;
import weka.classifiers.AbstractClassifier;
import weka.core.Instances;
/**
* Classifier that uses Weka
*
 * <usage><p><b>Usage</b>: &lt;Weka classifier&gt; [&lt;classifier options...&gt;]
* <br><pr><i>Weka classifier</i>: Name of a Weka classifier model (i.e. trees.REPTree). "?" to list options
* <br><pr><i>classifier options</i>: Any options for that model</usage>
*
* @author Logan Ward
* @version 0.1
*/
public class WekaClassifier extends BaseClassifier implements WekaModel {
/** Link to Weka-based model */
public AbstractClassifier Model;
/** Whether model type has been defined */
public boolean model_defined=false;
/** Name of model type currently in use */
protected String Model_Type;
/** Options supplied when instantiating Model */
protected String[] Model_Options;
/** Create a Weka model with a specified model and options
*
* @param model_type Model type (ie trees.J48)
* @param options Options for the model
*/
public WekaClassifier(String model_type, String[] options) throws Exception {
super();
setModel(model_type, options);
}
/**
* Create a WekaClassifier using a "rules.ZeroR" model
*/
public WekaClassifier() throws Exception {
super();
setModel("rules.ZeroR", null);
};
@Override
public void setOptions(List OptionsObj) throws Exception {
String[] Options = CommandHandler.convertCommandToString(OptionsObj);
try {
if (Options.length != 0) {
String ModelName = Options[0];
String[] ModelOptions = null;
if (Options.length > 1) {
ModelOptions = new String[Options.length - 1];
System.arraycopy(Options, 1, ModelOptions, 0, ModelOptions.length);
}
setModel(ModelName, ModelOptions);
}
} catch (Exception e) {
throw new Exception(printUsage());
}
}
@Override
public String printUsage() {
return "Usage: <Weka Classifier Name> [<Weka Classifier Options...>]";
}
@Override public WekaClassifier clone() {
WekaUtility.importWekaHome();
WekaClassifier output = (WekaClassifier) super.clone();
try { output.Model = (AbstractClassifier) AbstractClassifier.makeCopy(Model); }
catch (Exception e) { throw new Error("Cloning model failed due to: "+e); }
output.model_defined = model_defined;
return output;
}
@Override
public final void setModel(String model_type, String[] options) throws Exception {
Model = WekaUtility.instantiateWekaModel(model_type, options);
model_defined = true;
Model_Type = model_type;
if (options != null )
Model_Options = Model.getOptions();
}
@Override
public String getModelName() { return Model_Type; }
@Override
public String[] getModelOptions() { return Model_Options; }
@Override
public String getModelFull() {
String out = Model_Type;
for (String Model_Option : Model_Options) {
out += " " + Model_Option;
}
return out;
}
@Override public String toString() { return Model.toString(); }
@Override protected void train_protected(Dataset TrainingData) {
try {
Instances wekadata = TrainingData.transferToWeka(true, classIsDiscrete());
Model.buildClassifier(wekadata);
TrainingData.restoreAttributes(wekadata);
}
catch (Exception e) {
throw new Error(e);
}
}
@Override public void run_protected(Dataset TestData) {
try {
Instances wekadata = TestData.transferToWeka(false, classIsDiscrete());
if (classIsDiscrete()) {
double[][] probs = new double[TestData.NEntries()][TestData.NClasses()];
for (int i=0; i<wekadata.numInstances(); i++) {
probs[i]=Model.distributionForInstance(wekadata.instance(i));
}
TestData.setClassProbabilities(probs);
} else {
double[] prediction = new double [TestData.NEntries()];
for (int i=0; i<wekadata.numInstances(); i++)
prediction[i]=Model.classifyInstance(wekadata.instance(i));
TestData.setPredictedClasses(prediction);
}
TestData.restoreAttributes(wekadata);
} catch (Exception e) {
throw new Error(e);
}
}
@Override
protected String printModel_protected() {
return this.Model.toString();
}
} |
package net.rakugakibox.spring.boot.orika;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import lombok.extern.slf4j.Slf4j;
import ma.glasnost.orika.MapperFacade;
import ma.glasnost.orika.MapperFactory;
import ma.glasnost.orika.impl.DefaultMapperFactory;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
/**
* The auto-configuration for Orika.
*/
@Configuration
@ConditionalOnProperty(name = "orika.enabled", matchIfMissing = true)
@EnableConfigurationProperties(OrikaProperties.class)
@Slf4j
public class OrikaAutoConfiguration {
/**
* Creates a {@link DefaultMapperFactory.MapperFactoryBuilder}.
*
* @param orikaProperties the configuration properties for Orika.
* @param mapperFactoryBuilderConfigurers the configurers of {@link DefaultMapperFactory.MapperFactoryBuilder}.
* @return a {@link DefaultMapperFactory.MapperFactoryBuilder}.
*/
@Bean
@ConditionalOnMissingBean
public DefaultMapperFactory.MapperFactoryBuilder<?, ?> orikaMapperFactoryBuilder(
OrikaProperties orikaProperties,
Optional<List<OrikaMapperFactoryBuilderConfigurer>> mapperFactoryBuilderConfigurers
) {
DefaultMapperFactory.Builder mapperFactoryBuilder = new DefaultMapperFactory.Builder();
orikaProperties.getUseBuiltinConverters().ifPresent(mapperFactoryBuilder::useBuiltinConverters);
orikaProperties.getUseAutoMapping().ifPresent(mapperFactoryBuilder::useAutoMapping);
orikaProperties.getMapNulls().ifPresent(mapperFactoryBuilder::mapNulls);
orikaProperties.getDumpStateOnException().ifPresent(mapperFactoryBuilder::dumpStateOnException);
orikaProperties.getFavorExtension().ifPresent(mapperFactoryBuilder::favorExtension);
orikaProperties.getCaptureFieldContext().ifPresent(mapperFactoryBuilder::captureFieldContext);
mapperFactoryBuilderConfigurers
.orElseGet(Collections::emptyList)
.forEach(configurer -> configurer.configure(mapperFactoryBuilder));
log.debug("Created a DefaultMapperFactory.MapperFactoryBuilder: [{}]", mapperFactoryBuilder);
return mapperFactoryBuilder;
}
/**
* Creates a {@link MapperFactory}.
*
* @param mapperFactoryBuilder the {@link DefaultMapperFactory.MapperFactoryBuilder}.
* @param mapperFactoryConfigurers the configurers of {@link MapperFactory}.
* @return a {@link MapperFactory}.
*/
@Bean
@ConditionalOnMissingBean
public MapperFactory orikaMapperFactory(
DefaultMapperFactory.MapperFactoryBuilder<?, ?> mapperFactoryBuilder,
Optional<List<OrikaMapperFactoryConfigurer>> mapperFactoryConfigurers
) {
MapperFactory mapperFactory = mapperFactoryBuilder.build();
mapperFactoryConfigurers
.orElseGet(Collections::emptyList)
.forEach(configurer -> configurer.configure(mapperFactory));
log.debug("Created a MapperFactory: [{}]", mapperFactory);
return mapperFactory;
}
/**
* Creates a {@link MapperFacade}.
*
* @param mapperFactory the {@link MapperFactory}.
* @return a {@link MapperFacade}.
*/
@Bean
@ConditionalOnMissingBean
public MapperFacade orikaMapperFacade(MapperFactory mapperFactory) {
MapperFacade mapperFacade = mapperFactory.getMapperFacade();
log.debug("Created a MapperFacade: [{}]", mapperFacade);
return mapperFacade;
}
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.