answer
stringlengths
17
10.2M
package gov.nih.nci.calab.dto.workflow;

import gov.nih.nci.calab.service.util.CalabConstants;
import gov.nih.nci.calab.service.util.PropertyReader;

import java.io.File;
import java.util.Date;

/**
 * DTO describing a file attached to a workflow result: its storage path,
 * derived display name, submitter and submission date, and mask status.
 *
 * @author zengje
 */
public class FileBean {

    private String id = "";
    private String path = "";
    private String filename = "";
    private String createDateStr = "";
    private String fileSubmitter = "";
    private String fileMaskStatus = "";
    private Date createdDate;
    private String shortFilename = "";

    public FileBean() {
        super();
    }

    /**
     * Used in WorkflowResultBean.
     *
     * @param path               full storage path; the display filename is derived from it
     * @param fileSubmissionDate submission date, preformatted as a string
     * @param fileSubmitter      user who submitted the file
     * @param fileMaskStatus     mask (visibility) status of the file
     */
    public FileBean(String path, String fileSubmissionDate, String fileSubmitter, String fileMaskStatus) {
        this.path = path;
        this.createDateStr = fileSubmissionDate;
        this.fileSubmitter = fileSubmitter;
        this.fileMaskStatus = fileMaskStatus;
        this.filename = getFileName(path);
    }

    public FileBean(String id, String path) {
        super();
        this.id = id;
        this.path = path;
        this.filename = getFileName(path);
    }

    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    public String getPath() {
        return path;
    }

    /** Sets the path and keeps the derived filename in sync with it. */
    public void setPath(String path) {
        this.path = path;
        this.filename = getFileName(path);
    }

    public String getFilename() {
        return filename;
    }

    /**
     * Extracts the last path component as the display file name.
     * Fixes two defects in the original: a null or empty path caused a
     * NullPointerException, and a path consisting only of separators (e.g. "/")
     * caused an ArrayIndexOutOfBoundsException because String.split drops
     * trailing empty strings and can return an empty array. Also accepts
     * '\' as a separator so Windows-style paths are handled.
     *
     * @param path full path, may be null
     * @return the trailing file name component, or "" if none can be derived
     */
    private String getFileName(String path) {
        if (path == null || path.isEmpty()) {
            return "";
        }
        String[] tokens = path.split("[/\\\\]");
        return tokens.length > 0 ? tokens[tokens.length - 1] : "";
    }

    public String getFileMaskStatus() {
        return fileMaskStatus;
    }

    public void setFileMaskStatus(String fileMaskStatus) {
        this.fileMaskStatus = fileMaskStatus;
    }

    public String getCreateDateStr() {
        return createDateStr;
    }

    public void setCreateDateStr(String fileSubmissionDate) {
        this.createDateStr = fileSubmissionDate;
    }

    public String getFileSubmitter() {
        return fileSubmitter;
    }

    public void setFileSubmitter(String fileSubmitter) {
        this.fileSubmitter = fileSubmitter;
    }

    public Date getCreatedDate() {
        return createdDate;
    }

    public void setCreatedDate(Date createdDate) {
        this.createdDate = createdDate;
    }

    public String getShortFilename() {
        return shortFilename;
    }

    public void setShortFilename(String shortFileName) {
        this.shortFilename = shortFileName;
    }
}
package nom.bdezonia.zorbage.algorithm; import nom.bdezonia.zorbage.procedure.Procedure1; import nom.bdezonia.zorbage.algebra.Algebra; import nom.bdezonia.zorbage.datasource.IndexedDataSource; /** * * @author Barry DeZonia * */ public class Fill { // do not instantiate private Fill() {} /** * Fill a target data source in a non-threaded fashion. This is * usually needed when you want to fill a virtual structure. * * @param algebra * @param storage * @param value */ public static <T extends Algebra<T,U>,U> void compute(T algebra, U value, IndexedDataSource<U> storage) { long size = storage.size(); for (long i = 0; i < size; i++) { storage.set(i, value); } } /** * * @param algebra * @param storage * @param proc */ public static <T extends Algebra<T,U>,U> void compute(T algebra, Procedure1<U> proc, IndexedDataSource<U> storage) { Transform1.compute(algebra, proc, storage); } }
package org.agorava.cdi;

import org.agorava.api.atinject.Current;
import org.agorava.api.oauth.OAuthSession;
import org.agorava.api.storage.UserSessionRepository;
import org.agorava.cdi.deltaspike.DifferentOrNull;
import org.agorava.spi.SessionResolver;
import org.apache.deltaspike.core.api.exclude.Exclude;

import javax.enterprise.context.RequestScoped;
import javax.enterprise.inject.Produces;
import javax.inject.Inject;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

/**
 * Request-scoped session resolver used when the resolver mode is "rest".
 * Produces the current {@link OAuthSession} from the user's session
 * repository and lets callers walk all sessions bound to a given service.
 *
 * @author Antoine Sabot-Durand
 */
@Exclude(onExpression = InApplicationProducer.RESOLVER + ",rest", interpretedBy = DifferentOrNull.class)
@RequestScoped
public class RestSessionProducer implements SessionResolver {

    @Inject
    @Current
    private UserSessionRepository repo;

    /** Produces the session currently marked active in the repository. */
    @Override
    @Produces
    public OAuthSession getCurrentSession(@Current UserSessionRepository repository) {
        return repository.getCurrent();
    }

    /**
     * Marks the session with the given id as current and returns it.
     *
     * @param id repository id of the session to activate
     * @return the newly current session
     */
    public OAuthSession resolveSession(String id) {
        repo.setCurrent(repo.get(id));
        return repo.getCurrent();
    }

    /**
     * Iterates over all sessions whose name matches the given service.
     * Advancing the iterator also marks the returned session as current
     * in the repository; removal is unsupported.
     *
     * @param service service name to match against each session's name
     * @return iterator over the matching sessions
     */
    public Iterator<OAuthSession> iteratorOnSessions(String service) {
        final List<OAuthSession> matching = new ArrayList<OAuthSession>();
        for (OAuthSession candidate : repo) {
            if (candidate.getName().equals(service)) {
                matching.add(candidate);
            }
        }
        final Iterator<OAuthSession> delegate = matching.iterator();
        return new Iterator<OAuthSession>() {
            @Override
            public boolean hasNext() {
                return delegate.hasNext();
            }

            @Override
            public OAuthSession next() {
                OAuthSession session = delegate.next();
                // side effect preserved from the original: advancing makes the
                // returned session the repository's current one
                repo.setCurrent(session);
                return session;
            }

            @Override
            public void remove() {
                throw new UnsupportedOperationException();
            }
        };
    }
}
package net.estinet.gFeatures.Feature.FusionPlay;

import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.Queue;

import net.estinet.gFeatures.Events;
import net.estinet.gFeatures.Retrieval;
import net.estinet.gFeatures.gFeature;
import net.estinet.gFeatures.ClioteSky.API.CliotePing;
import net.md_5.bungee.BungeeCord;
import net.md_5.bungee.api.config.ServerInfo;
import net.md_5.bungee.api.connection.ProxiedPlayer;
import net.md_5.bungee.api.event.PlayerHandshakeEvent;
import net.md_5.bungee.api.plugin.Event;

/**
 * gFeature tracking "Fusion" connections between cliote servers: a registry
 * of {@link FusionCon} entries, a queue of pending replacements, and logic
 * to swap an offline connection for a queued one of a different type.
 */
public class FusionPlay extends gFeature implements Events {

    private static List<FusionCon> connections = new ArrayList<>();
    public static List<Integer> usedID = new ArrayList<>();
    public static Queue<FusionCon> queueConnections = new LinkedList<FusionCon>();

    EventHub eh = new EventHub();

    public FusionPlay(String featurename, String d) {
        super(featurename, d);
    }

    @Override
    public void enable() {
        Enable.onEnable();
    }

    @Override
    public void disable() {
        Disable.onDisable();
    }

    @Override
    public void eventTrigger(Event event) {
        // Fix: the original used getName().substring(26, ...) which hard-coded
        // the length of "net.md_5.bungee.api.event."; getSimpleName() yields
        // the same simple class name without the brittle offset.
        if (event.getClass().getSimpleName().equalsIgnoreCase("playerhandshakeevent")) {
            eh.onPlayerJoin((PlayerHandshakeEvent) event);
        }
    }

    @Override
    @Retrieval
    public void onPlayerHandshake() {}

    /** @return live list of all tracked connections */
    public static List<FusionCon> getConnections() {
        return connections;
    }

    public static void addConnection(FusionCon fc) {
        connections.add(fc);
    }

    public static void removeConnection(FusionCon fc) {
        connections.remove(fc);
    }

    /**
     * Removes the connection registered under the given cliote name, if any.
     * Fix: the original passed the lookup result straight to remove(int),
     * which threw IndexOutOfBoundsException for an unknown name (index -1).
     */
    public static void removeConnection(String clioteName) {
        int index = getConnectionArrayID(clioteName);
        if (index >= 0) {
            connections.remove(index);
        }
    }

    /** Records an id as used; duplicates are ignored. */
    public static void addID(int id) {
        if (!usedID.contains(id)) {
            usedID.add(id);
        }
    }

    /**
     * @param id pairing id
     * @return all connections sharing the given id (the "pair")
     */
    public static List<FusionCon> getConnectionPair(int id) {
        List<FusionCon> list = new ArrayList<>();
        for (FusionCon fc : connections) {
            if (fc.getID() == id) {
                list.add(fc);
            }
        }
        return list;
    }

    /** @return true if a connection with the given cliote name is registered */
    public static boolean hasConnection(String clioteName) {
        for (FusionCon fc : connections) {
            if (fc.getClioteName().equals(clioteName)) {
                return true;
            }
        }
        return false;
    }

    /**
     * @param clioteName cliote name to look up
     * @return index of the connection in the registry, or -1 if absent
     */
    public static int getConnectionArrayID(String clioteName) {
        for (int i = 0; i < connections.size(); i++) {
            FusionCon fc = connections.get(i);
            if (fc.getClioteName().equals(clioteName)) {
                return i;
            }
        }
        return -1;
    }

    /**
     * Marks the named connection offline and, when a queued connection of a
     * different type is available, promotes it: it inherits the id, is set to
     * WAITING, is pinged to start, and all players on the old server are moved
     * over.
     *
     * Fixes over the original: guards against an unknown cliote name (lookup
     * returned -1 and get(-1) threw), guards against an empty queue
     * (peek() returned null and the type comparison threw NPE), and removes an
     * empty while(li.hasNext()){} loop that spun forever because the iterator
     * was never advanced.
     */
    public static void replaceConnection(String clioteName) {
        int index = getConnectionArrayID(clioteName);
        if (index < 0) {
            return; // unknown cliote name: nothing to replace
        }
        FusionCon offline = connections.get(index);
        offline.setStatus(FusionStatus.OFFLINE);
        int id = offline.getID();
        offline.setID(-1);

        FusionCon fc = queueConnections.peek();
        if (fc == null) {
            return; // no queued replacement available
        }
        // NOTE(review): replacement proceeds only when the queued connection's
        // type DIFFERS from the offline one's — preserved from the original;
        // confirm this inversion is intended.
        if (!offline.getCurrentType().equals(fc.getCurrentType())) {
            queueConnections.poll();
            CliotePing cp = new CliotePing();
            cp.sendMessage("fusionplay start", fc.getClioteName()); //PLZ IMPLEMENT
            fc.setStatus(FusionStatus.WAITING);
            fc.setID(id);
            ServerInfo cur = BungeeCord.getInstance().getServerInfo(clioteName);
            ServerInfo si = BungeeCord.getInstance().getServerInfo(fc.getClioteName());
            for (ProxiedPlayer pp : cur.getPlayers()) {
                pp.connect(si);
            }
        } else {
            // TODO: the original iterated queueConnections here with an empty
            // body (an infinite busy-loop); the intended search for a
            // different-typed queued connection was never implemented.
        }
    }

    /**
     * @param id pairing id
     * @return true when more than one connection carries the given id
     */
    public static boolean isPairedID(int id) {
        int nums = 0;
        for (FusionCon fc : connections) {
            if (fc.getID() == id) {
                nums++;
            }
        }
        return nums > 1;
    }
}
package org.ajabshahar.platform.models;

import lombok.Getter;
import lombok.Setter;
import lombok.ToString;

import javax.persistence.*;
import java.sql.Timestamp;
import java.util.Calendar;
import java.util.Date;
import java.util.Set;

/**
 * JPA entity for the SONG table: media references (YouTube/SoundCloud),
 * titles, credits (singers/poets), categorization, lyrics text, and
 * publication state. Accessors are generated by Lombok.
 */
@Entity
@Table(name = "SONG")
@NamedQueries({
        @NamedQuery(
                name = "org.ajabshahar.platform.models.Song.findAllOnLandingPage",
                query = "SELECT p FROM Song p where p.showOnLandingPage=true and p.isAuthoringComplete=true"
        )
})
@Getter
@Setter
@ToString
public class Song {

    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    private long id;

    @Column(name = "SHOW_ON_LANDING_PAGE", nullable = true)
    private Boolean showOnLandingPage;

    @Column(name = "DURATION", nullable = true)
    private String duration;

    @Column(name = "YOUTUBE_VIDEO_ID", nullable = true)
    private String youtubeVideoId;

    @Column(name = "SOUNDCLOUD_TRACK_ID", nullable = false)
    private String soundCloudTrackID;

    @Column(name = "THUMBNAIL_URL")
    private String thumbnail_url;

    @Column(name = "DOWNLOAD_URL")
    private String download_url;

    @Column(name = "ABOUT")
    private String about;

    @Column(name = "IS_AUTHORING_COMPLETE")
    private Boolean isAuthoringComplete;

    @Column(name = "published_date")
    private Timestamp publishedDate;

    @ManyToMany(fetch = FetchType.EAGER)
    @JoinTable(name = "SONG_SINGER",
            joinColumns = {@JoinColumn(name = "SONG_ID", referencedColumnName = "ID")},
            inverseJoinColumns = {@JoinColumn(name = "SINGER_ID", referencedColumnName = "ID")})
    private Set<PersonDetails> singers;

    @ManyToMany(fetch = FetchType.EAGER)
    @JoinTable(name = "SONG_GENRE",
            joinColumns = {@JoinColumn(name = "SONG_ID", referencedColumnName = "ID")},
            inverseJoinColumns = {@JoinColumn(name = "GENRE_ID", referencedColumnName = "ID")})
    private Set<Genre> songGenre;

    @ManyToMany(fetch = FetchType.EAGER)
    @JoinTable(name = "SONG_POET",
            joinColumns = {@JoinColumn(name = "SONG_ID", referencedColumnName = "ID")},
            inverseJoinColumns = {@JoinColumn(name = "POET_ID", referencedColumnName = "ID")})
    private Set<PersonDetails> poets;

    @ManyToOne(fetch = FetchType.EAGER)
    @JoinColumn(name = "SONG_CATEGORY")
    private Category songCategory;

    @ManyToOne(fetch = FetchType.EAGER)
    @JoinColumn(name = "MEDIA_CATEGORY")
    private Category mediaCategory;

    @ManyToOne(fetch = FetchType.EAGER)
    @JoinColumn(name = "UMBRELLA_TITLE_ID")
    private Title title;

    @ManyToOne(fetch = FetchType.EAGER)
    @JoinColumn(name = "SONG_TITLE_ID")
    private Title songTitle;

    @OneToOne(fetch = FetchType.EAGER)
    @JoinColumn(name = "SONG_TEXT_ID")
    private SongText songText;

    @ManyToMany
    @JoinTable(name = "SONG_WORD",
            joinColumns = {@JoinColumn(name = "SONG_ID")},
            inverseJoinColumns = {@JoinColumn(name = "WORD_ID")})
    private Set<Word> words;

    /**
     * Copies all editable fields from the given song onto this entity.
     * When the incoming song is marked authoring-complete, the published
     * date is stamped with the current time.
     *
     * Fix: the original unboxed {@code song.isAuthoringComplete} directly in
     * the if-condition, throwing NullPointerException whenever the flag was
     * null (the column is nullable); {@code Boolean.TRUE.equals} treats null
     * as false. The Calendar/Date detour for "now" is replaced by
     * {@code System.currentTimeMillis()}.
     *
     * @param song source entity whose field values are copied in
     */
    public void updateFrom(Song song) {
        this.title = song.title;
        this.songTitle = song.songTitle;
        this.about = song.about;
        this.download_url = song.download_url;
        this.songText = song.songText;
        this.showOnLandingPage = song.showOnLandingPage;
        this.duration = song.duration;
        this.youtubeVideoId = song.youtubeVideoId;
        this.thumbnail_url = song.thumbnail_url;
        this.isAuthoringComplete = song.isAuthoringComplete;
        this.singers = song.singers;
        this.poets = song.poets;
        this.songCategory = song.songCategory;
        this.mediaCategory = song.mediaCategory;
        this.songGenre = song.songGenre;
        this.words = song.words;
        this.soundCloudTrackID = song.soundCloudTrackID;
        if (Boolean.TRUE.equals(song.isAuthoringComplete)) {
            this.publishedDate = new Timestamp(System.currentTimeMillis());
        }
    }
}
package net.sf.jaer.eventprocessing.filter; import java.awt.BorderLayout; import java.awt.Dimension; import java.awt.event.WindowAdapter; import java.awt.event.WindowEvent; import java.awt.image.BufferedImage; import java.io.BufferedOutputStream; import java.io.BufferedWriter; import java.io.File; import java.io.FileOutputStream; import java.io.FileWriter; import java.io.IOException; import java.util.Arrays; import java.util.Date; import java.util.Iterator; import java.util.Observable; import java.util.Observer; import java.util.logging.Level; import java.util.logging.Logger; import javax.imageio.ImageIO; import javax.swing.JFileChooser; import javax.swing.JFrame; import javax.swing.JOptionPane; import net.sf.jaer.Description; import net.sf.jaer.DevelopmentStatus; import net.sf.jaer.chip.AEChip; import net.sf.jaer.chip.Chip2D; import net.sf.jaer.event.ApsDvsEvent; import net.sf.jaer.event.ApsDvsEventPacket; import net.sf.jaer.event.EventPacket; import net.sf.jaer.eventio.AEDataFile; import net.sf.jaer.eventio.AEFileOutputStream; import net.sf.jaer.eventprocessing.EventFilter2D; import net.sf.jaer.graphics.AEFrameChipRenderer; import net.sf.jaer.graphics.ImageDisplay; import net.sf.jaer.graphics.ImageDisplay.Legend; import net.sf.jaer.util.DATFileFilter; import org.opencv.core.Core; import org.opencv.core.CvType; import org.opencv.core.Mat; import org.opencv.core.MatOfRect; import org.opencv.core.Point; import org.opencv.core.Rect; import org.opencv.core.Scalar; import org.opencv.core.Size; import org.opencv.imgproc.Imgproc; import org.opencv.objdetect.CascadeClassifier; import ch.unizh.ini.jaer.projects.davis.frames.ApsFrameExtractor; import eu.seebetter.ini.chips.DavisChip; @Description("Detect Faces with OpenCV and label data for later supervised learning.") @DevelopmentStatus(DevelopmentStatus.Status.Stable) public class FaceDetectionAPS extends EventFilter2D implements Observer /* Observer needed to get change events on chip construction */{ private JFrame 
apsFrame = null; public ImageDisplay apsDisplay; private DavisChip apsChip = null; private boolean newFrame, useExtRender = false; // useExtRender means using something like OpenCV to render the data. If false, the displayBuffer is displayed private float[] resetBuffer, signalBuffer; /** Raw pixel values from sensor, before conversion, brightness, etc.*/ private float[] displayBuffer; private float[] apsDisplayPixmapBuffer; /** Cooked pixel values, after brightness, contrast, log intensity conversion, etc. */ private float[] displayFrame; // format is RGB triplets indexed by ??? what is this? How different than displayBuffer??? public int width, height, maxADC, maxIDX; private float grayValue; public final float logSafetyOffset = 10000.0f; protected boolean showAPSFrameDisplay = getBoolean("showAPSFrameDisplay", true); private Legend apsDisplayLegend; public float pos_x, pos_y; public int pos_w, pos_h; public Labeled_image[] faces_in_frame; public String string; /** * A PropertyChangeEvent with this value is fired when a new frame has been * completely read. The oldValue is null. The newValue is the float[] * displayFrame that will be rendered. 
*/ public static final String EVENT_NEW_FRAME = AEFrameChipRenderer.EVENT_NEW_FRAME_AVAILBLE; private int lastFrameTimestamp=-1; private BufferedImage taggedImage; static { System.loadLibrary(Core.NATIVE_LIBRARY_NAME); //add library opencv } /** * Data logger for faces */ private boolean face_loggingEnabled = getBoolean("face_loggingEnabled", false); private AEFileOutputStream face_loggingOutputStream; private String face_defaultLoggingFolderName = System.getProperty("user.dir"); // lastLoggingFolder starts off at user.dir which is startup folder "host/java" where .exe launcher lives private String face_loggingFolder = "filterSettings/OpenCV_Nets/Labeled_data"; private File face_loggingFile; private File face_labelFile; private int face_maxLogFileSizeMB = prefs().getInt("DataLogger.maxLogFileSizeMB", 100); private boolean face_rotateFilesEnabled = prefs().getBoolean("DataLogger.rotateFilesEnabled", false); private int face_rotatePeriod = prefs().getInt("DataLogger.rotatePeriod", 7); private long face_bytesWritten = 0; private String face_logFileBaseName = prefs().get("DataLogger.logFileBaseName", ""); private int face_rotationNumber = 0; private boolean face_filenameTimestampEnabled = prefs().getBoolean("DataLogger.filenameTimestampEnabled", true); private File file_labels; private FileWriter fileWritter; private BufferedWriter bufferWritter; //opencv face detection public processor my_processor=new processor(); public Mat webcam_image=new Mat(); @Override public void update(Observable o, Object arg) { if((o instanceof AEChip) && (arg.equals(Chip2D.EVENT_SIZEX) || arg.equals(Chip2D.EVENT_SIZEY))){ initFilter(); } } public static enum Extraction { ResetFrame, SignalFrame, CDSframe }; private boolean invertIntensity = getBoolean("invertIntensity", false); private boolean preBufferFrame = getBoolean("preBufferFrame", true); private boolean realTimeLabelFrame = getBoolean("realTimeLabelFrame", false); private boolean logCompress = getBoolean("logCompress", false); 
private boolean logDecompress = getBoolean("logDecompress", false); private float displayContrast = getFloat("displayContrast", 1.0f); private float displayBrightness = getFloat("displayBrightness", 0.0f); public Extraction extractionMethod = Extraction.valueOf(getString("extractionMethod", "CDSframe")); public FaceDetectionAPS(AEChip chip) { super(chip); apsDisplay = ImageDisplay.createOpenGLCanvas(); apsFrame = new JFrame("APS Frame"); apsFrame.setPreferredSize(new Dimension(400, 400)); apsFrame.getContentPane().add(apsDisplay, BorderLayout.CENTER); apsFrame.pack(); apsFrame.addWindowListener(new WindowAdapter() { @Override public void windowClosing(WindowEvent e) { setShowAPSFrameDisplay(false); } }); apsDisplayLegend = apsDisplay.addLegend("", 0, 0); float[] displayColor = new float[3]; displayColor[0] = 1.0f; displayColor[1] = 1.0f; displayColor[2] = 1.0f; apsDisplayLegend.color = displayColor; initFilter(); setPropertyTooltip("invertIntensity", "Inverts grey scale, e.g. for raw samples of signal level"); setPropertyTooltip("preBufferFrame", "Only display and use complete frames; otherwise display APS samples as they arrive"); setPropertyTooltip("realTimeLabelFrame", "Label faces in APS frame in real time"); setPropertyTooltip("logCompress", "Should the displayBuffer be log compressed"); setPropertyTooltip("logDecompress", "Should the logComressed displayBuffer be rendered in log scale (true) or linearly (false)"); setPropertyTooltip("displayContrast", "Gain for the rendering of the APS display"); setPropertyTooltip("displayBrightness", "Offset for the rendering of the APS display"); setPropertyTooltip("extractionMethod", "Method to extract a frame; CDSframe is the final result after subtracting signal from reset frame. 
Signal and reset frames are the raw sensor output before correlated double sampling."); setPropertyTooltip("showAPSFrameDisplay", "Shows the JFrame frame display if true"); //final String cont = "Control", params = "Parameters"; setPropertyTooltip( "face_loggingEnabled", "Enable to start logging data"); setPropertyTooltip( "face_filenameTimestampEnabled", "adds a timestamp to the filename, but means rotation will not overwrite old data files and will eventually fill disk"); setPropertyTooltip( "face_logFileBaseName", "the base name of the log file - if empty the AEChip class name is used"); setPropertyTooltip( "face_rotatePeriod", "log file rotation period"); setPropertyTooltip( "face_rotateFilesEnabled", "enabling rotates log files over rotatePeriod"); setPropertyTooltip( "face_maxLogFileSizeMB", "logging is stopped when files get larger than this in MB"); setPropertyTooltip( "face_loggingFolder", "directory to store logged data files"); // check lastLoggingFolder to see if it really exists, if not, default to user.dir File lf = new File(face_loggingFolder); if (!lf.exists() || !lf.isDirectory()) { log.warning("face_loggingFolder " + lf + " doesn't exist or isn't a directory, defaulting to " + lf); face_setLoggingFolder(face_defaultLoggingFolderName); } chip.addObserver(this); } @Override public void initFilter() { resetFilter(); } @Override public void resetFilter() { if (DavisChip.class.isAssignableFrom(chip.getClass())) { apsChip = (DavisChip) chip; } else { log.warning("The filter ApsFrameExtractor can only be used for chips that extend the ApsDvsChip class"); } newFrame = false; width = chip.getSizeX(); // note that on initial construction width=0 because this constructor is called while chip is still being built height = chip.getSizeY(); maxIDX = width * height; maxADC = apsChip.getMaxADC(); apsDisplay.setImageSize(width, height); resetBuffer = new float[width * height]; signalBuffer = new float[width * height]; displayFrame = new float[width * height]; 
displayBuffer = new float[width * height]; apsDisplayPixmapBuffer = new float[3 * width * height]; Arrays.fill(resetBuffer, 0.0f); Arrays.fill(signalBuffer, 0.0f); Arrays.fill(displayFrame, 0.0f); Arrays.fill(displayBuffer, 0.0f); Arrays.fill(apsDisplayPixmapBuffer, 0.0f); } @Override public EventPacket<?> filterPacket(EventPacket<?> in) { ApsDvsEventPacket packet = (ApsDvsEventPacket) in; loglabeledData(packet); checkMaps(); if (packet == null) { return null; } if (packet.getEventClass() != ApsDvsEvent.class) { log.warning("wrong input event class, got " + packet.getEventClass() + " but we need to have " + ApsDvsEvent.class); return null; } Iterator apsItr = packet.fullIterator(); while (apsItr.hasNext()) { ApsDvsEvent e = (ApsDvsEvent) apsItr.next(); if (e.isSampleEvent()) { putAPSevent(e); } } if (showAPSFrameDisplay) { apsDisplay.repaint(); } return in; } synchronized private void loglabeledData(EventPacket eventPacket) { if (eventPacket == null) { return; } // if we are logging data to disk do it here if (face_loggingEnabled) { try { face_loggingOutputStream.writePacket(eventPacket); // log all events face_bytesWritten += eventPacket.getSize(); ///faces_in_frame //faces_in_frame.length fileWritter = new FileWriter(file_labels,true); bufferWritter = new BufferedWriter(fileWritter); for (Labeled_image element : faces_in_frame) { //log.warning("FACE AT X " + element.getloc_x()); //log.warning("FACE AT Y " + element.getloc_y()); eventPacket.getFirstTimestamp(); //for ( EventPacket e:eventPacket ){ int ts = eventPacket.getFirstTimestamp(); //log.warning("TIMESTAMP " + ts); string = String.format("%d\t%f\t%f\t%d\t%d\n", ts, element.getloc_x(),element.getloc_y(), element.getw(), element.geth()); try { bufferWritter.write(string); //bufferWritter.newLine(); } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } } bufferWritter.flush(); bufferWritter.close(); if ((face_bytesWritten >>> 20) > face_maxLogFileSizeMB) { 
setface_loggingEnabled(false); } } catch (IOException e) { log.warning("while logging data to " + face_loggingFile + " caught " + e + ", will try to close file"); face_loggingEnabled = false; getSupport().firePropertyChange("loggingEnabled", null, false); // try { // face_loggingOutputStream.close(); // log.info("closed logging file " + face_loggingFile); //} catch (IOException e2) { // log.warning("while closing logging file " + face_loggingFile + " caught " + e2); } catch(NullPointerException e) { System.out.print("NullPointerException caught\n"); } } } private void checkMaps() { apsDisplay.checkPixmapAllocation(); if (showAPSFrameDisplay && !apsFrame.isVisible()) { apsFrame.setVisible(true); } } public void putAPSevent(ApsDvsEvent e) { if (!e.isSampleEvent()) { return; } //if(e.isStartOfFrame())timestampFrameStart=e.timestampFrameStart; ApsDvsEvent.ReadoutType type = e.getReadoutType(); float val = e.getAdcSample(); int idx = getIndex(e.x, e.y); if (idx >= maxIDX) { return; } if (e.isStartOfFrame()) { if (newFrame && useExtRender) { log.warning("Acquistion of new frame started even though old frame was never delivered to ext renderer"); } } if (idx < 0) { if (e.isEndOfFrame()) { if (preBufferFrame && (displayBuffer != null) && !useExtRender && showAPSFrameDisplay) { displayPreBuffer(); } if (realTimeLabelFrame && (displayBuffer != null) && !useExtRender && showAPSFrameDisplay) { setPreBufferFrame(true); displayPreBufferTagged(); //here label faces } newFrame = true; lastFrameTimestamp=e.timestamp; getSupport().firePropertyChange(EVENT_NEW_FRAME, null, displayFrame); } return; } switch (type) { case SignalRead: signalBuffer[idx] = val; break; case ResetRead: default: resetBuffer[idx] = val; break; } switch (extractionMethod) { case ResetFrame: displayBuffer[idx] = resetBuffer[idx]; break; case SignalFrame: displayBuffer[idx] = signalBuffer[idx]; break; case CDSframe: default: displayBuffer[idx] = resetBuffer[idx] - signalBuffer[idx]; break; } if (invertIntensity) 
{ displayBuffer[idx] = maxADC - displayBuffer[idx]; } if (logCompress) { displayBuffer[idx] = (float) Math.log(displayBuffer[idx] + logSafetyOffset); } if (logCompress && logDecompress) { grayValue = scaleGrayValue((float) (Math.exp(displayBuffer[idx]) - logSafetyOffset)); } else { grayValue = scaleGrayValue(displayBuffer[idx]); } displayFrame[idx] = grayValue; if (!preBufferFrame && !useExtRender && showAPSFrameDisplay) { apsDisplay.setPixmapGray(e.x, e.y, grayValue); } else { apsDisplayPixmapBuffer[3 * idx] = grayValue; apsDisplayPixmapBuffer[(3 * idx) + 1] = grayValue; apsDisplayPixmapBuffer[(3 * idx) + 2] = grayValue; } } /** convert Mat to BufferedImage, it works with grayscale images or 3byte_bgr * @return BufferedImage (can be dim*3 if input mat is CV_8UC3 or dim*1 if input mat is CV_8UC1) */ public static BufferedImage mat2Img(Mat in, int x, int y) { BufferedImage out; byte[] data = new byte[ x * y * (int)in.elemSize()]; int type; in.get(0, 0, data); if(in.channels() == 1) { type = BufferedImage.TYPE_BYTE_GRAY; } else { type = BufferedImage.TYPE_3BYTE_BGR; } out = new BufferedImage(x, y, type); out.getRaster().setDataElements(0, 0, x, y, data); return out; } /** convert Mat to float, * @return float */ private float[] mat2float(Mat in, int x, int y) { byte[] data = new byte[ x * y * (int)in.elemSize()]; float[] outputFile; outputFile = new float[3 * width * height]; for(int i = 0; i < (3 * width * height); i=i+3) { outputFile[i*3] = data[i] >> 16 ; outputFile[(i*3) + 1] = data[i+1] >> 8; outputFile[(i*3) + 2] = data[i+2] >> 0; } return outputFile; } /** convert BufferedImage to float [3 * width * height], * @return float */ private float[] img2float(BufferedImage in, int x, int y) { float[] outputFile; outputFile = new float[3 * width * height]; //we need to flip it int xa = 0; int ya = 0; for (xa=0; xa < (width / 2); xa++) { for ( ya = 0; ya < height; ya++) { final int l = in.getRGB( width - (xa + 1), ya); final int r = in.getRGB( xa, ya); in.setRGB( xa, 
ya, l ); in.setRGB( width - (xa + 1), ya, r ); } } //if(in.getType() == BufferedImage.TYPE_INT_RGB) int[] dataBuff = in.getRGB(0, 0, x, y, null, 0, x); for(int i = 0; i < dataBuff.length; i++) { outputFile[i*3] = (dataBuff[dataBuff.length-i-1] >> 16) & 0xFF; //red outputFile[(i*3) + 1] = (dataBuff[dataBuff.length-i-1] >> 8) & 0xFF; //green outputFile[(i*3) + 2] = (dataBuff[dataBuff.length-i-1] >> 0) & 0xFF; //blue outputFile[i*3] = (outputFile[i*3] ) / 255.0f; outputFile[(i*3) + 1] = (outputFile[(i*3) + 1] ) / 255.0f; outputFile[(i*3) + 2] = (outputFile[(i*3) + 2] ) / 255.0f; //(dataBuff[dataBuff.length-i-1] >> 24) alpha } //ArrayUtils.reverse(outputFile); return outputFile; } /** convert BufferedImage to Mat, , it works with grayscale images or 3byte_bgr * @return mat (can be dim*3 if input image is TYPE_INT_RGB or dim*1 if input image is TYPE_BYTE_GRAY) */ public static Mat img2Mat(BufferedImage in, int x, int y) { Mat out; byte[] data; int r, g, b; if(in.getType() == BufferedImage.TYPE_INT_RGB) { out = new Mat(y, x, CvType.CV_8UC3); data = new byte[x * y * (int)out.elemSize()]; int[] dataBuff = in.getRGB(0, 0, x, y, null, 0, x); for(int i = 0; i < dataBuff.length; i++) { data[i*3] = (byte) ((dataBuff[i] >> 16) & 0xFF); data[(i*3) + 1] = (byte) ((dataBuff[i] >> 8) & 0xFF); data[(i*3) + 2] = (byte) ((dataBuff[i] >> 0) & 0xFF); } } else { out = new Mat(y, x, CvType.CV_8UC1); data = new byte[x * y * (int)out.elemSize()]; int[] dataBuff = in.getRGB(0, 0, x, y, null, 0, x); for(int i = 0; i < dataBuff.length; i++) { r = (byte) ((dataBuff[i] >> 16) & 0xFF); g = (byte) ((dataBuff[i] >> 8) & 0xFF); b = (byte) ((dataBuff[i] >> 0) & 0xFF); data[i] = (byte)((0.21 * r) + (0.71 * g) + (0.07 * b)); //luminosity } } out.put(0, 0, data); return out; } public void saveImage(){ Date d=new Date(); String fn="filterSettings/OpenCV_Nets/Saved_images/ApsFrame-"+AEDataFile.DATE_FORMAT.format(d)+".png"; BufferedImage theImage = new BufferedImage(chip.getSizeX(), chip.getSizeY(), 
BufferedImage.TYPE_BYTE_GRAY); for(int y = 0; y<chip.getSizeY(); y++){ for(int x = 0; x<chip.getSizeX(); x++){ int idx = apsDisplay.getPixMapIndex(x, chip.getSizeY()-y-1); int value = ((int)(256*apsDisplay.getPixmapArray()[idx]) << 16) | ((int)(256*apsDisplay.getPixmapArray()[idx+1]) << 8) | (int)(256*apsDisplay.getPixmapArray()[idx+2]); theImage.setRGB(x, y, value); } } File outputfile = new File(fn); try { ImageIO.write(theImage, "png", outputfile); } catch (IOException ex) { Logger.getLogger(ApsFrameExtractor.class.getName()).log(Level.SEVERE, null, ex); } } public void labelImage(){ Date d=new Date(); String fn="filterSettings/OpenCV_Nets/Saved_images/ApsLabeledFrame-"+AEDataFile.DATE_FORMAT.format(d)+".png"; BufferedImage theImage = new BufferedImage(chip.getSizeX(), chip.getSizeY(), BufferedImage.TYPE_BYTE_GRAY); for(int y = 0; y<chip.getSizeY(); y++){ for(int x = 0; x<chip.getSizeX(); x++){ int idx = apsDisplay.getPixMapIndex(x, chip.getSizeY()-y-1); int value = ((int)(256*apsDisplay.getPixmapArray()[idx]) << 16) | ((int)(256*apsDisplay.getPixmapArray()[idx+1]) << 8) | (int)(256*apsDisplay.getPixmapArray()[idx+2]); theImage.setRGB(x, y, value); } } //convert image to mat Mat newMat = img2Mat(theImage, chip.getSizeX(), chip.getSizeY()); //opencv neural net face detection from APS frame faces_in_frame = my_processor.detect(newMat); //re-convert the tagged mat back to image with circles around faces theImage = mat2Img(newMat, chip.getSizeX(), chip.getSizeY()); File outputfile = new File(fn); try { ImageIO.write(theImage, "png", outputfile); } catch (IOException ex) { Logger.getLogger(ApsFrameExtractor.class.getName()).log(Level.SEVERE, null, ex); } } /** Returns timestampFrameStart of last frame, which is the timestampFrameStart of the frame end event * * @return the timestampFrameStart (usually in us) */ public int getLastFrameTimestamp() { return lastFrameTimestamp; } private float scaleGrayValue(float value) { float v; v = ((displayContrast * value) + 
displayBrightness) / maxADC; if (v < 0) { v = 0; } else if (v > 1) { v = 1; } return v; } public void updateDisplayValue(int xAddr, int yAddr, float value) { if (logCompress && logDecompress) { grayValue = scaleGrayValue((float) (Math.exp(value) - logSafetyOffset)); } else { grayValue = scaleGrayValue(value); } apsDisplay.setPixmapGray(xAddr, yAddr, grayValue); } public void setPixmapArray(float[] pixmapArray) { apsDisplay.setPixmapArray(pixmapArray); } public void displayPreBuffer() { apsDisplay.setPixmapArray(apsDisplayPixmapBuffer); } public void displayPreBufferTagged() { //System.out.println(apsDisplayPixmapBuffer); Date d=new Date(); BufferedImage theImage = new BufferedImage(chip.getSizeX(), chip.getSizeY(), BufferedImage.TYPE_BYTE_GRAY); for(int y = 0; y<chip.getSizeY(); y++){ for(int x = 0; x<chip.getSizeX(); x++){ int idx = apsDisplay.getPixMapIndex(x, chip.getSizeY()-y-1); int value = ((int)(256*apsDisplayPixmapBuffer[idx]) << 16) | ((int)(256*apsDisplayPixmapBuffer[idx+1]) << 8) | (int)(256*apsDisplayPixmapBuffer[idx+2]); theImage.setRGB(x, y, value); } } //convert image to mat Mat newMat = img2Mat(theImage, chip.getSizeX(), chip.getSizeY()); //opencv neural net face detection from APS frame faces_in_frame = my_processor.detect(newMat); //reconvert mat to image theImage = mat2Img(newMat, chip.getSizeX(), chip.getSizeY()); //re-convert the tagged mat back to image with circles around faces apsDisplayPixmapBuffer = img2float(theImage, chip.getSizeX(), chip.getSizeY()); apsDisplay.setPixmapArray(apsDisplayPixmapBuffer); } /** * returns the index <code>y * width + x</code> into pixel arrays for a given x,y location where x is * horizontal address and y is vertical and it starts at lower left corner * with x,y=0,0 and x and y increase to right and up. * * @param x * @param y * @param idx the array index * @see #getWidth() * @see #getHeight() */ public int getIndex(int x, int y) { return (y * width) + x; } /** * Checks if new frame is available. 
* * @return true if new frame is available * @see #getNewFrame() */ public boolean hasNewFrame() { return newFrame; } /** * Returns a double[] buffer of latest displayed frame with adjustments like brightness, contrast, log intensity conversion, etc. * The array is indexed by y * width + x. To access a particular pixel, * use getIndex(). * * @return the double[] frame */ public float[] getNewFrame() { newFrame = false; return displayFrame; } /** * Returns a clone of the latest float buffer. The array is indexed by <code>y * width + x</code>. * To access a particular pixel, use getIndex() for convenience. * * @return the float[] of pixel values * @see #getIndex(int, int) */ public float[] getDisplayBuffer() { newFrame = false; return displayBuffer.clone(); } /** * Tell chip to acquire new frame, return immediately. * */ public void acquireNewFrame() { apsChip.takeSnapshot(); } public float getMinBufferValue() { float minBufferValue = 0.0f; if (logCompress) { minBufferValue = (float) Math.log(minBufferValue + logSafetyOffset); } return minBufferValue; } public float getMaxBufferValue() { float maxBufferValue = maxADC; if (logCompress) { maxBufferValue = (float) Math.log(maxBufferValue + logSafetyOffset); } return maxBufferValue; } public void setExtRender(boolean setExt) { this.useExtRender = setExt; } public void setLegend(String legend) { this.apsDisplayLegend.s = legend; } public void setDisplayGrayFrame(double[] frame) { int xc = 0; int yc = 0; for (double element : frame) { apsDisplay.setPixmapGray(xc, yc, (float) element); xc++; if (xc == width) { xc = 0; yc++; } } } public void setDisplayFrameRGB(float[] frame) { int xc = 0; int yc = 0; for (int i = 0; i < frame.length; i += 3) { apsDisplay.setPixmapRGB(xc, yc, frame[i + 2], frame[i + 1], frame[i]); xc++; if (xc == width) { xc = 0; yc++; } } } /** * @return the invertIntensity */ public boolean isInvertIntensity() { return invertIntensity; } /** * @param invertIntensity the invertIntensity to set */ public 
void setInvertIntensity(boolean invertIntensity) { this.invertIntensity = invertIntensity; putBoolean("invertIntensity", invertIntensity); } /** * @return the preBufferFrame */ public boolean isPreBufferFrame() { return preBufferFrame; } /** * @return the realTimeLabelFrame */ public boolean isrealTimeLabelFrame() { return realTimeLabelFrame; } /** * @param invertIntensity the invertIntensity to set */ public void setPreBufferFrame(boolean preBuffer) { this.preBufferFrame = preBuffer; putBoolean("preBufferFrame", preBufferFrame); } public void setrealTimeLabelFrame(boolean realTimeLabelFrame) { this.realTimeLabelFrame = realTimeLabelFrame; putBoolean("realTimeLabelFrame", realTimeLabelFrame); } /** * @return the logDecompress */ public boolean isLogDecompress() { return logDecompress; } /** * @param logDecompress the logDecompress to set */ public void setLogDecompress(boolean logDecompress) { this.logDecompress = logDecompress; putBoolean("logDecompress", logDecompress); } /** * @return the logCompress */ public boolean isLogCompress() { return logCompress; } /** * @param logCompress the logCompress to set */ public void setLogCompress(boolean logCompress) { this.logCompress = logCompress; putBoolean("logCompress", logCompress); } /** * @return the displayContrast */ public float getDisplayContrast() { return displayContrast; } /** * @param displayContrast the displayContrast to set */ public void setDisplayContrast(float displayContrast) { this.displayContrast = displayContrast; putFloat("displayContrast", displayContrast); resetFilter(); } /** * @return the displayBrightness */ public float getDisplayBrightness() { return displayBrightness; } /** * @param displayBrightness the displayBrightness to set */ public void setDisplayBrightness(float displayBrightness) { this.displayBrightness = displayBrightness; putFloat("displayBrightness", displayBrightness); resetFilter(); } public Extraction getExtractionMethod() { return extractionMethod; } synchronized public 
void setExtractionMethod(Extraction extractionMethod) { getSupport().firePropertyChange("extractionMethod", this.extractionMethod, extractionMethod); putString("edgePixelMethod", extractionMethod.toString()); this.extractionMethod = extractionMethod; resetFilter(); } /** * @return the showAPSFrameDisplay */ public boolean isShowAPSFrameDisplay() { return showAPSFrameDisplay; } public boolean isface_loggingEnabled(){ return face_loggingEnabled; } // public void setface_loggingEnabled(boolean logData) { // this.face_loggingEnabled = logData; // putBoolean("face_loggingEnabled", logData); /** * @param showAPSFrameDisplay the showAPSFrameDisplay to set */ public void setShowAPSFrameDisplay(boolean showAPSFrameDisplay) { this.showAPSFrameDisplay = showAPSFrameDisplay; putBoolean("showAPSFrameDisplay", showAPSFrameDisplay); if (apsFrame != null) { apsFrame.setVisible(showAPSFrameDisplay); } getSupport().firePropertyChange("showAPSFrameDisplay", null, showAPSFrameDisplay); } @Override public synchronized void setFilterEnabled(boolean yes) { super.setFilterEnabled(yes); //To change body of generated methods, choose Tools | Templates. if (!isFilterEnabled()) { if (apsFrame != null) { apsFrame.setVisible(false); } } } /** * returns frame width in pixels. 
* * @return the width */ public int getWidth() { return width; } /** * returns frame height in pixels * * @return the height */ public int getHeight() { return height; } /** * returns max ADC value * * @return the maxADC */ public int getMaxADC() { return maxADC; } /** * returns max index into frame buffer arrays * * @return the maxIDX */ public int getMaxIDX() { return maxIDX; } public void doSaveAsPNG() { saveImage(); } public void doLabelFacesAndSaveAsPNG() { labelImage(); } /** * @param loggingEnabled the loggingEnabled to set */ public void setface_loggingEnabled(boolean this_face) { this.face_loggingEnabled = this_face; putBoolean("face_loggingEnabled", this_face); boolean old = this.face_loggingEnabled; boolean success = false; if (this_face) { File f = face_startLogging(); if (f == null) { log.warning("face_startLogging returned null"); this.face_loggingEnabled=false; putBoolean("face_loggingEnabled", false); } else { success = true; } } else { File f = face_stopLogging(false); if (f == null) { log.warning("face_stopLogging returned null"); } else { success = true; } } //this.face_loggingEnabled = face_loggingEnabled; getSupport().firePropertyChange("face_loggingEnabled", old, face_loggingEnabled); } /** Starts logging AE data to a file. * * @param filename the filename to log to, including all path information. Filenames without path * are logged to the startup folder. The default extension of AEDataFile.DATA_FILE_EXTENSION is appended if there is no extension. * * @return the file that is logged to. 
*/ synchronized public File face_startLogging(String filename) { if (filename == null) { log.warning("tried to log to null filename, aborting"); return null; } if (!filename.toLowerCase().endsWith(AEDataFile.DATA_FILE_EXTENSION)) { filename = filename + AEDataFile.DATA_FILE_EXTENSION; log.info("Appended extension to make filename=" + filename); } try { face_loggingFile = new File(filename); face_loggingOutputStream = new AEFileOutputStream(new BufferedOutputStream(new FileOutputStream(face_loggingFile), 100000), chip); face_loggingEnabled = true; getSupport().firePropertyChange("face_loggingEnabled", null, true); log.info("starting logging to " + face_loggingFile); } catch (IOException e) { face_loggingFile = null; log.warning("exception on starting to log data to file "+filename+": "+e); face_loggingEnabled=false; getSupport().firePropertyChange("face_loggingEnabled", null, false); } return face_loggingFile; } /** Starts logging data to a default data logging file. * @return the file that is logged to. * @throws IOException */ public File face_startLogging() { String dateString = face_filenameTimestampEnabled ? AEDataFile.DATE_FORMAT.format(new Date()) : ""; String base = chip.getClass().getSimpleName(); int suffixNumber = face_rotateFilesEnabled ? 
face_rotationNumber++ : 0; if (face_rotationNumber >= face_rotatePeriod) { face_rotationNumber = 0; } boolean succeeded = false; String filename; if ((face_logFileBaseName != null) && !face_logFileBaseName.isEmpty()) { base = face_logFileBaseName; } String suffix; if (face_rotateFilesEnabled) { suffix = String.format("%02d", suffixNumber); } else { suffix = ""; } do { filename = face_loggingFolder + File.separator + base + "-" + dateString + "-" + suffix + AEDataFile.DATA_FILE_EXTENSION; File lf = new File(filename); if (face_rotateFilesEnabled) { succeeded = true; // if rotation, always use next file } else if (!lf.isFile()) { succeeded = true; } } while ((succeeded == false) && (suffixNumber++ <= 99)); if (succeeded == false) { log.warning("could not open a unigue new file for logging after trying up to " + filename + " aborting startLogging"); return null; } log.info("SAVING LABELED DATA IN log.info("FILENAME label file_labels = new File(filename+"_label.txt"); //if file doesn't exists, then create it if(!file_labels.exists()){ try { file_labels.createNewFile(); } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } } try { fileWritter = new FileWriter(filename+"_label.txt"); } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } bufferWritter = new BufferedWriter(fileWritter); try { bufferWritter.append("# LABEL DATA from OPEN CV\n"); bufferWritter.append("# The format of the file is timestamp, loc_x, loc_y, width, and height of the face (in pixels)\n"); bufferWritter.append("# timestamp loc_x loc_y dimx dimy\n"); bufferWritter.flush(); bufferWritter.close(); } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } face_bytesWritten = 0; File lf = face_startLogging(filename); log.info("AER DATA FILENAME return lf; } /** Stops logging and optionally opens file dialog for where to save file. 
* If number of AEViewers is more than one, dialog is also skipped since we may be logging from multiple viewers. * @param confirmFilename true to show file dialog to confirm filename, false to skip dialog. * @return chosen File */ synchronized public File face_stopLogging(boolean confirmFilename) { if (!face_loggingEnabled) { return null; } // the file has already been logged somewhere with a timestamped name, what this method does is // to move the already logged file to a possibly different location with a new name, or if cancel is hit, // to delete it. int retValue = JFileChooser.CANCEL_OPTION; try { log.info("stopped logging at " + AEDataFile.DATE_FORMAT.format(new Date())); face_loggingEnabled = false; bufferWritter.flush(); bufferWritter.close(); fileWritter.close(); face_loggingOutputStream.close(); //if jaer viewer is logging synchronized data files, then just save the file where it was logged originally if (confirmFilename) { JFileChooser chooser = new JFileChooser(); chooser.setCurrentDirectory(new File(face_loggingFolder)); chooser.setFileFilter(new DATFileFilter()); chooser.setDialogTitle("Save logged data"); String fn = face_loggingFile.getName(); // System.out.println("fn="+fn); // strip off .aedat to make it easier to add comment to filename String base = fn.substring(0, fn.lastIndexOf(AEDataFile.DATA_FILE_EXTENSION)); chooser.setSelectedFile(new File(base)); chooser.setDialogType(JFileChooser.SAVE_DIALOG); chooser.setMultiSelectionEnabled(false); boolean savedIt = false; do { // clear the text input buffer to prevent multiply typed characters from destroying proposed datetimestamped filename retValue = chooser.showSaveDialog(chip.getAeViewer()); if (retValue == JFileChooser.APPROVE_OPTION) { File newFile = chooser.getSelectedFile(); // make sure filename ends with .aedat if (!newFile.getName().endsWith(AEDataFile.DATA_FILE_EXTENSION)) { newFile = new File(newFile.getCanonicalPath() + AEDataFile.DATA_FILE_EXTENSION); } //we'll rename the logged data 
file to the selection boolean renamed = face_loggingFile.renameTo(newFile); if (renamed) { // if successful, cool, save persistence savedIt = true; face_setLoggingFolder(chooser.getCurrentDirectory().getPath()); face_loggingFile = newFile; // so that we play it back if it was saved and playback immediately is selected log.info("renamed logging file to " + newFile); } else { // confirm overwrite int overwrite = JOptionPane.showConfirmDialog(chooser, "Overwrite file \"" + newFile + "\"?", "Overwrite file?", JOptionPane.WARNING_MESSAGE, JOptionPane.OK_CANCEL_OPTION); if (overwrite == JOptionPane.OK_OPTION) { // we need to delete the file boolean deletedOld = newFile.delete(); if (deletedOld) { savedIt = face_loggingFile.renameTo(newFile); savedIt = true; log.info("renamed logging file to " + newFile); // TODO something messed up here with confirmed overwrite of logging file face_loggingFile = newFile; } else { log.warning("couldn't delete logging file " + newFile); } } else { chooser.setDialogTitle("Couldn't save file there, try again"); } } } else { // user hit cancel, delete logged data boolean deleted = face_loggingFile.delete(); if (deleted) { log.info("Deleted temporary logging file " + face_loggingFile); } else { log.warning("Couldn't delete temporary logging file " + face_loggingFile); } savedIt = true; } } while (savedIt == false); // keep trying until user is happy (unless they deleted some crucial data!) 
} } catch (IOException e) { e.printStackTrace(); } face_loggingEnabled = false; getSupport().firePropertyChange("loggingEnabled", null, false); return face_loggingFile; } /** * @param loggingFolder the lastFolderName to set */ public void face_setLoggingFolder(String loggingFolder) { String old = loggingFolder; this.face_loggingFolder = loggingFolder; getPrefs().put("DataLogger.loggingFolder", loggingFolder); getSupport().firePropertyChange("loggingFolder", old, loggingFolder); } /** * @return the maxLogFileSizeMB */ public int getface_MaxLogFileSizeMB() { return face_maxLogFileSizeMB; } /** * @param maxLogFileSizeMB the maxLogFileSizeMB to set */ public void setface_MaxLogFileSizeMB(int maxLogFileSizeMB) { this.face_maxLogFileSizeMB = maxLogFileSizeMB; prefs().putInt("DataLogger.maxLogFileSizeMB", maxLogFileSizeMB); } } class Labeled_image { private Mat inputframe; private int w; private int h; private double loc_x; private double loc_y; // constructor public Labeled_image(Mat inputframe, int w, int h, double d, double e) { this.inputframe = inputframe; this.w = w; this.h = h; this.loc_x = d; this.loc_y = e; } // getter public Mat getMatImage() { return inputframe; } public int getw() { return w; } public int geth() { return h; } public double getloc_x() { return loc_x; } public double getloc_y() { return loc_y; } // setter public void setMatImage(Mat name) { this.inputframe = name; } public void setw(int code) { this.w = code; } public void seth(int code) { this.h = code; } public void setloc_x(float code) { this.loc_x = code; } public void setloc_y(float code) { this.loc_y = code; } } class processor { private CascadeClassifier face_cascade; // Create a constructor method public processor(){ //load network pre-trained weights face_cascade=new CascadeClassifier("filterSettings/OpenCV_Nets/haarcascade_frontalface_alt.xml"); //face_cascade=new 
CascadeClassifier("/Users/federicocorradi/Documents/workspace/FaceDetectionOpenCV/src/main/resources/lbpcascade_frontalface.xml"); if(face_cascade.empty()) { System.out.println("--(!)Error loading A\n"); return; } else { System.out.println("Face classifier loaded..."); } } public Labeled_image[] detect(Mat inputframe){ MatOfRect faces = new MatOfRect(); Imgproc.equalizeHist( inputframe, inputframe ); face_cascade.detectMultiScale(inputframe, faces); System.out.println(String.format("Detected %s faces", faces.toArray().length)); Labeled_image[] this_frame = new Labeled_image[faces.toArray().length]; int counter = 0; for(Rect rect:faces.toArray()) { Point center= new Point(rect.x + (rect.width*0.5), rect.y + (rect.height*0.5) ); Imgproc.ellipse( inputframe, center, new Size( rect.width*0.5, rect.height*0.5), 0, 0, 360, new Scalar( 255, 0, 255 ), 4, 8, 0 ); System.out.println(String.format("Face detected in center (%f,%f) rect size (%d,%d)", rect.x+ (rect.width*0.5), rect.y+ (rect.height*0.5), rect.width , rect.height)); this_frame[counter] = new Labeled_image(inputframe, rect.width, rect.height, rect.x+ (rect.width*0.5), rect.y+ (rect.height*0.5)); //setMatImage(inputframe); //this_frame[counter].setw(rect.width); //this_frame[counter].seth(rect.height); //this_frame[counter].setloc_x(rect.x); //this_frame[counter].setloc_y(rect.y); counter = counter +1; } return this_frame; } }
package net.zephyrizing.http_server_test; import java.nio.ByteBuffer; import java.util.Arrays; import java.util.List; import java.util.Map; import net.zephyrizing.http_server.HttpRequest; import net.zephyrizing.http_server.HttpRequest.Method; import net.zephyrizing.http_server.RequestBuilder; import static net.zephyrizing.http_server.HttpRequest.Method.*; import org.junit.Ignore; import org.junit.Test; import static org.junit.Assert.assertThat; import static org.hamcrest.CoreMatchers.*; public class RequestBuilderTest { @Test public void canAddAMethodAndPath() { Method m = GET; String p = "/"; RequestBuilder b = new RequestBuilder() .method(m) .path(p); HttpRequest request = b.build(); assertThat(request.method(), equalTo(m)); assertThat(request.path().toString(), equalTo(p)); } @Test public void canAddHeaders() { RequestBuilder b = baseRequestBuilder(); String key = "Content-Length"; List<String> val = Arrays.asList("A", "B"); b.header(key, val); HttpRequest request = b.build(); assertThat(request, notNullValue()); Map<String, List<String>> headers = request.headers(); assertThat(headers, notNullValue()); assertThat(headers.keySet(), hasItem(key)); assertThat(headers.get(key), equalTo(val)); } @Test public void canReturnContentLength() { RequestBuilder b = baseRequestBuilder(); String key = "Content-Length"; List<String> val = Arrays.asList("10"); b.header(key, val); assertThat(b.hasContentHeader(), equalTo(true)); assertThat(b.contentLength(), equalTo(10L)); } @Test public void canAddBody() { RequestBuilder rb = baseRequestBuilder(); ByteBuffer bb = ByteBuffer.allocate(10); rb.body(bb); HttpRequest request = rb.build(); assertThat(request.body(), sameInstance(bb)); } private RequestBuilder baseRequestBuilder() { return new RequestBuilder() .method(GET) .path("/"); } }
package hu.blummers.bitcoin.model; import java.util.ArrayList; import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.TreeSet; import javax.persistence.EntityManager; import javax.persistence.PersistenceContext; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; import org.springframework.transaction.PlatformTransactionManager; import org.springframework.transaction.annotation.Propagation; import org.springframework.transaction.annotation.Transactional; import com.mysema.query.jpa.impl.JPAQuery; import hu.blummers.bitcoin.core.BitcoinPeer; import hu.blummers.bitcoin.core.ChainStore; import hu.blummers.bitcoin.core.Chain; import hu.blummers.bitcoin.core.Difficulty; import hu.blummers.bitcoin.core.Hash; import hu.blummers.bitcoin.core.ValidationException; import hu.blummers.bitcoin.model.QJpaBlock; import hu.blummers.bitcoin.model.QJpaHead; import hu.blummers.bitcoin.model.QJpaTransaction; @Component("store") @Transactional(propagation = Propagation.MANDATORY) public class JpaChainStore implements ChainStore { private static final Logger log = LoggerFactory.getLogger(JpaChainStore.class); @PersistenceContext EntityManager entityManager; private Head currentHead = null; private Map<String, Head> heads = new HashMap<String, Head>(); private Map<String, Member> members = new HashMap<String, Member>(); private Map<BitcoinPeer, TreeSet<KnownMember>> knownByPeer = new HashMap<BitcoinPeer, TreeSet<KnownMember>> (); private Map<BitcoinPeer, HashSet<String>> requestsByPeer = new HashMap<BitcoinPeer, HashSet<String>> (); private Comparator<KnownMember> incomingOrder = new Comparator<KnownMember> (){ @Override public int compare(KnownMember arg0, KnownMember arg1) { int diff = arg0.nr - arg1.nr; if ( diff != 0 ) return diff; else return arg0.equals(arg1) 
? 0 : arg0.hashCode() - arg1.hashCode(); }}; @Autowired PlatformTransactionManager transactionManager; public class Head { private StoredMember last; private double chainWork; private long height; public StoredMember getLast() { return last; } public double getChainWork() { return chainWork; } public long getHeight() { return height; } public void setLast(StoredMember last) { this.last = last; } public void setChainWork(double chainWork) { this.chainWork = chainWork; } public void setHeight(long height) { this.height = height; } } public class Member { protected String hash; public Member(String hash) { super(); this.hash = hash; } public String getHash() { return hash; } @Override public int hashCode() { return hash.hashCode(); } } public class StoredMember extends Member { public StoredMember(String hash, Long id, StoredMember previous, long time) { super(hash); this.id = id; this.previous = previous; this.time = time; } protected Long id; protected StoredMember previous; protected long time; public Long getId() { return id; } public StoredMember getPrevious() { return previous; } public long getTime() { return time; } } public class KnownMember extends Member { protected Set<BitcoinPeer> knownBy; protected int nr; public KnownMember(String hash, int nr, Set<BitcoinPeer> knownBy) { super(hash); this.knownBy = knownBy; this.nr = nr; } public Set<BitcoinPeer> getKnownBy() { return knownBy; } public int getNr() { return nr; } } public void cache() { log.trace("filling chain cache with stored blocks"); QJpaBlock block = QJpaBlock.jpaBlock; JPAQuery q = new JPAQuery(entityManager); for (JpaBlock b : q.from(block).list(block)) { if ( b.getPrevious() != null ) members.put(b.getHash(), new StoredMember(b.getHash(), b.getId(), (StoredMember) members.get(b.getPrevious().getHash()), b.getCreateTime())); else members.put(b.getHash(), new StoredMember(b.getHash(), b.getId(), null, b.getCreateTime())); } log.trace("filling chain cache with heads"); QJpaHead head = 
QJpaHead.jpaHead; q = new JPAQuery(entityManager); for (JpaHead h : q.from(head).list(head)) { Head sh = new Head(); sh.setChainWork(h.getChainWork()); sh.setHeight(h.getHeight()); sh.setLast((StoredMember)members.get(h.getLeaf())); heads.put(h.getLeaf(), sh); if ( currentHead == null || currentHead.getChainWork() < sh.getChainWork() ) currentHead = sh; } } public synchronized void addInventory (String hash, BitcoinPeer peer) { Member cached = members.get(hash); if ( cached == null ) { HashSet<BitcoinPeer> peers = new HashSet<BitcoinPeer> (); members.put(hash, cached = new KnownMember (hash, members.size(), peers)); } if ( !(cached instanceof KnownMember) ) return; ((KnownMember)cached).getKnownBy().add(peer); TreeSet<KnownMember> membersOfPeer = knownByPeer.get(peer); if ( membersOfPeer == null ) { membersOfPeer = new TreeSet<KnownMember> (incomingOrder); knownByPeer.put(peer, membersOfPeer); } membersOfPeer.add((KnownMember)cached); } public synchronized List<String> getRequests (BitcoinPeer peer) { HashSet<String> requests = requestsByPeer.get(peer); if ( requests == null ) requests = new HashSet<String> (); TreeSet<KnownMember> knownbyThisPeer = knownByPeer.get(peer); ArrayList<String> result = new ArrayList<String> (); if ( knownbyThisPeer != null ) { for ( KnownMember kn : knownbyThisPeer ) { requests.add(kn.getHash()); result.add(kn.getHash()); } requestsByPeer.put(peer, requests); } knownByPeer.get(peer).clear(); return result; } public synchronized void removePeer (BitcoinPeer peer) { requestsByPeer.remove(peer); TreeSet<KnownMember> ms = knownByPeer.get(peer); if ( ms != null ) for ( KnownMember m : ms ) m.getKnownBy().remove(peer); knownByPeer.remove(peer); } public synchronized List<String> getLocator () { List<String> locator = new ArrayList<String> (); StoredMember curr = currentHead.getLast(); StoredMember prev = curr.getPrevious(); for ( int i =0, step = 1; prev != null; ++i ) { locator.add(curr.getHash()); for ( int j =0; prev != null && j < step; 
++j ) { curr = prev; prev = curr.getPrevious(); } if ( i > 10 ) step *= 2; } if ( curr != currentHead.getLast() ) locator.add(curr.getHash()); return locator; } @Override public synchronized long store(JpaBlock b) throws ValidationException { b.computeHash (); if ( b.getCreateTime() > System.currentTimeMillis()/1000 ) throw new ValidationException ("Future generation attempt or lagging system clock."); Member cached = members.get(b.getHash()); if (cached instanceof StoredMember) return currentHead.getHeight(); for ( TreeSet<KnownMember> k : knownByPeer.values () ) { k.remove(cached); } List<BitcoinPeer> finishedPeer = new ArrayList<BitcoinPeer> (); for ( Map.Entry<BitcoinPeer,HashSet<String>> e : requestsByPeer.entrySet() ) { e.getValue().remove(b.getHash()); if ( e.getValue().size() == 0 ) finishedPeer.add(e.getKey ()); } for ( BitcoinPeer p : finishedPeer ) requestsByPeer.remove(p); // find previous block Member cachedPrevious = members.get(b.getPreviousHash()); JpaBlock prev = null; if ( cachedPrevious instanceof StoredMember ) { prev = entityManager.find(JpaBlock.class, ((StoredMember) cachedPrevious).getId()); } if (prev != null) { b.setPrevious(prev); boolean branching = false; JpaHead head; if (prev.getHead().getLeaf ().equals(prev.getHash()) ) { // continuing head = prev.getHead(); head.setLeaf(b.getHash()); head.setHeight(head.getHeight() + 1); head.setChainWork(head.getChainWork() + Difficulty.getDifficulty(b.getDifficultyTarget())); head = entityManager.merge(head); } else { // branching branching = true; head = new JpaHead(); head.setTrunk(prev.getHash()); head.setHeight(prev.getHeight()); head.setChainWork(prev.getChainWork()); head.setPrevious(prev.getHead()); head.setLeaf(b.getHash()); head.setHeight(head.getHeight() + 1); head.setChainWork(head.getChainWork() + Difficulty.getDifficulty(b.getDifficultyTarget())); entityManager.persist(head); } b.setHead(head); b.setHeight(head.getHeight()); b.setChainWork(head.getChainWork()); if ( prev != null ) { 
if ( b.getHeight() > 2016 && b.getHeight() % 2016 == 0 ) { StoredMember c = null; StoredMember p = (StoredMember)cachedPrevious; for ( int i = 0; i < 2016; ++i ) { c = p; p = c.getPrevious(); } if (Difficulty.getNextTarget(b.getCreateTime() - p.getTime (), prev.getDifficultyTarget()) != b.getDifficultyTarget() ) { throw new ValidationException ("Difficulty does not match expectation"); } } else { if ( b.getDifficultyTarget() != prev.getDifficultyTarget() ) throw new ValidationException ("Illegal attempt to change difficulty"); } } if ( new Hash(b.getHash()).toBigInteger().compareTo(Difficulty.getTarget(b.getDifficultyTarget())) > 0 ) throw new ValidationException ("Insufficuent proof of work for current difficulty"); boolean coinbase = true; Map<String, JpaTransaction> blockTransactions = new HashMap<String, JpaTransaction> (); for (JpaTransaction t : b.getTransactions()) { t.calculateHash(); blockTransactions.put(t.getHash(), t); if ( coinbase ) { coinbase = false; continue; } if ( t.getInputs () != null ) { for ( JpaTransactionInput i : t.getInputs() ) { JpaTransaction sourceTransaction; JpaTransactionOutput transactionOutput = null; if ( (sourceTransaction = blockTransactions.get(i.getSourceHash())) != null ) { if ( i.getIx () < sourceTransaction.getOutputs().size () ) transactionOutput = sourceTransaction.getOutputs().get((int)i.getIx()); } else { QJpaTransaction tx = QJpaTransaction.jpaTransaction; QJpaTransactionOutput txout = QJpaTransactionOutput.jpaTransactionOutput; JPAQuery query = new JPAQuery(entityManager); transactionOutput = query.from(txout).join(txout.transaction, tx) .where(tx.hash.eq(i.getSourceHash()).and(txout.ix.eq(i.getIx()))). 
orderBy(tx.id.desc()).limit(1).uniqueResult(txout); } if ( transactionOutput == null ) throw new ValidationException ("Transaction input refers to unknown output "); i.setSource(transactionOutput); if ( i.getSource().getSink() != null ) throw new ValidationException ("Double spending attempt"); } } } entityManager.persist(b); for (JpaTransaction t : b.getTransactions()) for ( JpaTransactionInput i : t.getInputs() ) if ( i.getSource() != null ) { i.getSource().setSink(i); entityManager.merge(i.getSource()); } StoredMember m = new StoredMember(b.getHash(), b.getId(), (StoredMember) members.get(b.getPrevious().getHash()), b.getCreateTime()); members.put(b.getHash(), m); Head usingHead = currentHead; if ( branching ) { heads.put(b.getHash(), usingHead = new Head ()); } if ( head.getChainWork() > currentHead.getChainWork() ) { currentHead = usingHead; } usingHead.setLast(m); usingHead.setChainWork(head.getChainWork()); usingHead.setHeight(head.getHeight()); log.trace("stored block " + b.getHash()); } return currentHead.getHeight(); } @Override public String getHeadHash() { return currentHead.getLast().getHash(); } @Override public void resetStore(Chain chain) { JpaBlock genesis = chain.getGenesis(); JpaHead h = new JpaHead(); h.setLeaf(genesis.getHash()); h.setHeight(1); h.setChainWork(Difficulty.getDifficulty(genesis.getDifficultyTarget())); entityManager.persist(h); genesis.setHead(h); entityManager.persist(genesis); } @Override public JpaBlock get(String hash) { QJpaBlock block = QJpaBlock.jpaBlock; JPAQuery query = new JPAQuery(entityManager); return query.from(block).where(block.hash.eq(hash)).uniqueResult(block); } @Override public long getChainHeight() { Head longest = null; for ( Head h : heads.values() ) { if ( longest == null || longest.getChainWork() < h.getChainWork() ) longest = h; } return longest.getHeight (); } @Override public synchronized int getNumberOfRequests(BitcoinPeer peer) { HashSet<String> s = requestsByPeer.get(peer); if ( s == null ) return 
0; return s.size(); } }
package org.c4sg.controller;

import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.c4sg.dto.SkillDTO;
import org.c4sg.service.SkillService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;

import java.util.List;

/**
 * REST controller exposing read-only skill endpoints under {@code /api/skills}.
 * Delegates all data access to {@link SkillService}; Swagger annotations
 * document the API under the "skill" tag.
 */
@CrossOrigin
@RestController
@RequestMapping("/api/skills")
@Api(description = "Operations about Skills", tags = "skill")
public class SkillController {

    // Field injection as used elsewhere in this codebase; the service supplies
    // the skill list (ranking semantics live in the service/query layer).
    @Autowired
    private SkillService skillService;

    /**
     * GET /api/skills/all — returns every skill as JSON.
     *
     * @return list of skill DTOs; ordering is whatever
     *         {@link SkillService#findSkills()} produces (per the ApiOperation
     *         notes, ranked by popularity — confirm in the service layer)
     */
    @CrossOrigin
    @RequestMapping(value = "/all", produces = {"application/json"}, method = RequestMethod.GET)
    @ApiOperation(value = "Find all skills", notes = "Find all skills, ranked by most popular selections by volunteers and nonprofits.")
    public List<SkillDTO> getSkills() {
        return skillService.findSkills();
    }
}
package info.guardianproject.otr;

import info.guardianproject.iocipher.File;
import info.guardianproject.iocipher.FileInputStream;
import info.guardianproject.iocipher.FileOutputStream;
import info.guardianproject.iocipher.RandomAccessFile;
import info.guardianproject.otr.app.im.IDataListener;
import info.guardianproject.otr.app.im.app.ImApp;
import info.guardianproject.otr.app.im.app.IocVfs;
import info.guardianproject.otr.app.im.engine.Address;
import info.guardianproject.otr.app.im.engine.ChatSession;
import info.guardianproject.otr.app.im.engine.DataHandler;
import info.guardianproject.otr.app.im.engine.Message;
import info.guardianproject.util.Debug;
import info.guardianproject.util.LogCleaner;
import info.guardianproject.util.SystemServices;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.OutputStream;
import java.io.UnsupportedEncodingException;
import java.security.DigestInputStream;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.UUID;

import net.java.otr4j.session.SessionStatus;

import org.apache.commons.io.IOUtils;
import org.apache.commons.io.output.NullOutputStream;
import org.apache.http.HttpException;
import org.apache.http.HttpMessage;
import org.apache.http.HttpRequest;
import org.apache.http.HttpRequestFactory;
import org.apache.http.HttpResponse;
import org.apache.http.HttpResponseFactory;
import org.apache.http.MethodNotSupportedException;
import org.apache.http.ProtocolVersion;
import org.apache.http.RequestLine;
import org.apache.http.impl.DefaultHttpResponseFactory;
import org.apache.http.impl.io.AbstractSessionInputBuffer;
import org.apache.http.impl.io.AbstractSessionOutputBuffer;
import org.apache.http.impl.io.HttpRequestParser;
import org.apache.http.impl.io.HttpRequestWriter;
import org.apache.http.impl.io.HttpResponseParser;
import org.apache.http.impl.io.HttpResponseWriter;
import org.apache.http.io.HttpMessageWriter;
import org.apache.http.io.SessionInputBuffer;
import org.apache.http.message.BasicHttpRequest;
import org.apache.http.message.BasicHttpResponse;
import org.apache.http.message.BasicLineFormatter;
import org.apache.http.message.BasicLineParser;
import org.apache.http.message.BasicStatusLine;
import org.apache.http.message.LineFormatter;
import org.apache.http.message.LineParser;
import org.apache.http.params.BasicHttpParams;
import org.apache.http.params.HttpParams;

import android.net.Uri;
import android.os.Environment;
import android.os.RemoteException;
import android.util.Log;

import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;

/**
 * Implements in-band file transfer over an OTR {@link ChatSession}.
 *
 * Transfers are framed as HTTP-style request/response messages ("OFFER" and
 * "GET" methods) serialized with Apache HttpCore in-memory session buffers and
 * carried inside OTR data packets via {@link ChatSession#sendDataAsync}.
 * Received files are written to the IOCipher virtual file system
 * ({@code info.guardianproject.iocipher}); progress and completion are
 * reported through an optional {@link IDataListener}.
 */
public class OtrDataHandler implements DataHandler {

    /** URL prefix identifying in-band OTRDATA transfer URLs handled here. */
    public static final String URI_PREFIX_OTR_IN_BAND = "otr-in-band:/storage/";

    // Throttling / sizing limits for chunked transfers.
    private static final int MAX_OUTSTANDING = 3;           // max in-flight GET chunk requests per transfer
    private static final int MAX_CHUNK_LENGTH = 32768;      // max bytes served per GET range
    private static final int MAX_TRANSFER_LENGTH = 1024*1024*64; // 64 MiB cap on a whole transfer

    private static final byte[] EMPTY_BODY = new byte[0];

    private static final String TAG = "GB.OtrDataHandler";

    private static final ProtocolVersion PROTOCOL_VERSION = new ProtocolVersion("HTTP", 1, 1);

    private static HttpParams params = new BasicHttpParams();
    private static HttpRequestFactory requestFactory = new MyHttpRequestFactory();
    private static HttpResponseFactory responseFactory = new DefaultHttpResponseFactory();
    private LineParser lineParser = new BasicLineParser(PROTOCOL_VERSION);
    private LineFormatter lineFormatter = new BasicLineFormatter();
    private ChatSession mChatSession;
    private IDataListener mDataListener;
    // NOTE(review): stored on status change but never read elsewhere in this class.
    private SessionStatus mOtrStatus;

    public OtrDataHandler(ChatSession chatSession) {
        this.mChatSession = chatSession;
    }

    /**
     * Called when the OTR session status changes; once the session is
     * encrypted, re-sends any requests that never got a response.
     */
    public void onOtrStatusChanged(SessionStatus status) {
        mOtrStatus = status;
        if (status == SessionStatus.ENCRYPTED) {
            retryRequests();
        }
    }

    private void retryRequests() {
        // Resend all unfilled requests
        for (Request request: requestCache.asMap().values()) {
            if (!request.isSeen())
                sendRequest(request);
        }
    }

    public void setDataListener (IDataListener dataListener) {
        mDataListener = dataListener;
    }

    /**
     * Request factory that accepts any method name, so custom verbs such as
     * "OFFER" parse into a {@link BasicHttpRequest} instead of being rejected.
     */
    public static class MyHttpRequestFactory implements HttpRequestFactory {
        public MyHttpRequestFactory() {
            super();
        }

        public HttpRequest newHttpRequest(final RequestLine requestline) throws MethodNotSupportedException {
            if (requestline == null) {
                throw new IllegalArgumentException("Request line may not be null");
            }
            //String method = requestline.getMethod();
            return new BasicHttpRequest(requestline);
        }

        public HttpRequest newHttpRequest(final String method, final String uri) throws MethodNotSupportedException {
            return new BasicHttpRequest(method, uri);
        }
    }

    /** Session input buffer backed entirely by an in-memory byte array. */
    static class MemorySessionInputBuffer extends AbstractSessionInputBuffer {
        public MemorySessionInputBuffer(byte[] value) {
            init(new ByteArrayInputStream(value), 1000, params);
        }

        @Override
        public boolean isDataAvailable(int timeout) throws IOException {
            // Never used for blocking reads; data is fully in memory.
            throw new UnsupportedOperationException();
        }
    }

    /** Session output buffer that accumulates the serialized message in memory. */
    static class MemorySessionOutputBuffer extends AbstractSessionOutputBuffer {
        ByteArrayOutputStream outputStream;
        public MemorySessionOutputBuffer() {
            outputStream = new ByteArrayOutputStream(1000);
            init(outputStream, 1000, params);
        }

        public byte[] getOutput() {
            return outputStream.toByteArray();
        }
    }

    /**
     * Handles an incoming OTRDATA request packet ("OFFER" announces a file we
     * may fetch; "GET" asks us to serve a byte range of a file we offered).
     * Responses are sent back via {@link #sendResponse}.
     */
    public void onIncomingRequest(Address requestThem, Address requestUs, byte[] value) {
        //Log.e( TAG, "onIncomingRequest:" + requestThem);

        SessionInputBuffer inBuf = new MemorySessionInputBuffer(value);
        HttpRequestParser parser = new HttpRequestParser(inBuf, lineParser, requestFactory, params);
        HttpRequest req;

        try {
            req = (HttpRequest)parser.parse();
        } catch (IOException e) {
            throw new RuntimeException(e);
        } catch (HttpException e) {
            e.printStackTrace();
            return;
        }

        String requestMethod = req.getRequestLine().getMethod();
        String uid = req.getFirstHeader("Request-Id").getValue();
        String url = req.getRequestLine().getUri();

        if (requestMethod.equals("OFFER")) {
            debug("incoming OFFER " + url);
            if (!url.startsWith(URI_PREFIX_OTR_IN_BAND)) {
                debug("Unknown url scheme " + url);
                sendResponse(requestUs, 400, "Unknown scheme", uid, EMPTY_BODY);
                return;
            }
            // NOTE(review): 200 is sent before the header checks below, so a
            // malformed offer gets both a 200 and a 400 response — confirm intended.
            sendResponse(requestUs, 200, "OK", uid, EMPTY_BODY);
            if (!req.containsHeader("File-Length"))
            {
                sendResponse(requestUs, 400, "File-Length must be supplied", uid, EMPTY_BODY);
                return;
            }
            int length = Integer.parseInt(req.getFirstHeader("File-Length").getValue());
            if (!req.containsHeader("File-Hash-SHA1"))
            {
                sendResponse(requestUs, 400, "File-Hash-SHA1 must be supplied", uid, EMPTY_BODY);
                return;
            }
            String sum = req.getFirstHeader("File-Hash-SHA1").getValue();
            String type = null;
            if (req.containsHeader("Mime-Type")) {
                type = req.getFirstHeader("Mime-Type").getValue();
            }
            debug("Incoming sha1sum " + sum);

            VfsTransfer transfer;
            try {
                transfer = new VfsTransfer(url, type, length, requestUs, sum);
            } catch (IOException e) {
                e.printStackTrace();
                return;
            }
            transferCache.put(url, transfer);

            // Handle offer

            // TODO ask user to confirm we want this
            boolean accept = false;

            if (mDataListener != null)
            {
                try {
                    // Listener decides asynchronously; acceptance arrives via acceptTransfer().
                    mDataListener.onTransferRequested(url, requestThem.getAddress(),requestUs.getAddress(),transfer.url);

                    //callback is now async, via "acceptTransfer" method

                  //  if (accept)
                    //    transfer.perform();

                } catch (RemoteException e) {
                    LogCleaner.error(ImApp.LOG_TAG, "error approving OTRDATA transfer request", e);
                }
            }

        } else if (requestMethod.equals("GET") && url.startsWith(URI_PREFIX_OTR_IN_BAND)) {
            debug("incoming GET " + url);
            ByteArrayOutputStream byteBuffer = new ByteArrayOutputStream();
            int reqEnd;  // NOTE(review): declared but never used

            try {
                Offer offer = offerCache.getIfPresent(url);
                if (offer == null) {
                    sendResponse(requestUs, 400, "No such offer made", uid, EMPTY_BODY);
                    return;
                }

                // in case we don't see a response to underlying request, but peer still proceeds
                offer.seen();

                if (!req.containsHeader("Range"))
                {
                    sendResponse(requestUs, 400, "Range must start with bytes=", uid, EMPTY_BODY);
                    return;
                }
                String rangeHeader = req.getFirstHeader("Range").getValue();
                String[] spec = rangeHeader.split("=");
                if (spec.length != 2 || !spec[0].equals("bytes"))
                {
                    sendResponse(requestUs, 400, "Range must start with bytes=", uid, EMPTY_BODY);
                    return;
                }
                String[] startEnd = spec[1].split("-");
                if (startEnd.length != 2)
                {
                    sendResponse(requestUs, 400, "Range must be START-END", uid, EMPTY_BODY);
                    return;
                }

                int start = Integer.parseInt(startEnd[0]);
                int end = Integer.parseInt(startEnd[1]);
                if (end - start + 1 > MAX_CHUNK_LENGTH) {
                    sendResponse(requestUs, 400, "Range must be at most " + MAX_CHUNK_LENGTH, uid, EMPTY_BODY);
                    return;
                }

                // Serve the requested byte range of the offered local file.
                File fileGet = new File(offer.getUri());
                java.io.FileInputStream is = new java.io.FileInputStream(fileGet);
                readIntoByteBuffer(byteBuffer, is, start, end);
                is.close();

                if (mDataListener != null) {
                    float percent = ((float)end) / ((float)fileGet.length());

                    mDataListener.onTransferProgress(true, offer.getId(), requestThem.getAddress(), offer.getUri(),
                        percent);

                    // Treat >98% served as completion of the outgoing transfer.
                    if (percent > .98f) {

                        String mimeType = null;
                        if (req.getFirstHeader("Mime-Type") != null)
                            mimeType = req.getFirstHeader("Mime-Type").getValue();

                        mDataListener.onTransferComplete(true, offer.getId(), requestThem.getAddress(), offer.getUri(), mimeType, offer.getUri());
                    }
                }

            } catch (UnsupportedEncodingException e) {
                //throw new RuntimeException(e);
                sendResponse(requestUs, 400, "Unsupported encoding", uid, EMPTY_BODY);
                return;
            } catch (IOException e) {
                //throw new RuntimeException(e);
                sendResponse(requestUs, 400, "IOException", uid, EMPTY_BODY);
                return;
            } catch (NumberFormatException e) {
                sendResponse(requestUs, 400, "Range is not numeric", uid, EMPTY_BODY);
                return;
            } catch (Exception e) {
                sendResponse(requestUs, 500, "Unknown error", uid, EMPTY_BODY);
                return;
            }

            byte[] body = byteBuffer.toByteArray();
            debug("Sent sha1 is " + sha1sum(body));
            sendResponse(requestUs, 200, "OK", uid, body);

        } else {
            debug("Unknown method / url " + requestMethod + " " + url);
            sendResponse(requestUs, 400, "OK", uid, EMPTY_BODY);
        }
    }

    /** Starts a previously offered transfer once the user/listener accepts it. */
    public void acceptTransfer (String url)
    {
        Transfer transfer = transferCache.getIfPresent(url);
        if (transfer != null)
        {
            transfer.perform();
        }
    }

    /**
     * Copies bytes [start, end] of the stream into byteBuffer.
     * Silently returns if the stream cannot be positioned at start.
     */
    private static void readIntoByteBuffer(ByteArrayOutputStream byteBuffer, java.io.FileInputStream is, int start, int end)
            throws IOException {
        //Log.e( TAG, "readIntoByteBuffer:" + (end-start));
        if (start != is.skip(start)) {
            return;
        }
        int size = end - start + 1;
        int buffersize = 1024;
        byte[] buffer = new byte[buffersize];

        int len = 0;
        while((len = is.read(buffer)) != -1){
            if (len > size) {
                len = size;
            }
            byteBuffer.write(buffer, 0, len);
            size -= len;
        }
    }

    /** Drains the remainder of a session input buffer into byteBuffer. */
    private static void readIntoByteBuffer(ByteArrayOutputStream byteBuffer, SessionInputBuffer sib)
            throws IOException {
        //Log.e( TAG, "readIntoByteBuffer:");

        int buffersize = 1024;
        byte[] buffer = new byte[buffersize];

        int len = 0;
        while((len = sib.read(buffer)) != -1){
            byteBuffer.write(buffer, 0, len);
        }
    }

    /**
     * Serializes an HTTP response (status line, Request-Id header, body) and
     * sends it to the peer as an OTR data packet.
     */
    private void sendResponse(Address us, int code, String statusString, String uid, byte[] body) {
        MemorySessionOutputBuffer outBuf = new MemorySessionOutputBuffer();
        HttpMessageWriter writer = new HttpResponseWriter(outBuf, lineFormatter, params);
        HttpMessage response = new BasicHttpResponse(new BasicStatusLine(PROTOCOL_VERSION, code, statusString));
        response.addHeader("Request-Id", uid);
        try {
            writer.write(response);
            outBuf.write(body);
            outBuf.flush();
        } catch (IOException e) {
            throw new RuntimeException(e);
        } catch (HttpException e) {
            throw new RuntimeException(e);
        }
        byte[] data = outBuf.getOutput();
        Message message = new Message("");
        message.setFrom(us);
        debug("send response " + statusString + " for " + uid);
        mChatSession.sendDataAsync(message, true, data);
    }

    /**
     * Handles an incoming OTRDATA response packet: matches it to a pending
     * request by Request-Id, stores the received chunk, and either requests
     * more chunks or finalizes the transfer (checksum + listener callbacks).
     */
    public void onIncomingResponse(Address from, Address to, byte[] value) {
        //Log.e( TAG, "onIncomingResponse:" + value.length);

        SessionInputBuffer buffer = new MemorySessionInputBuffer(value);
        HttpResponseParser parser = new HttpResponseParser(buffer, lineParser, responseFactory, params);
        HttpResponse res;
        try {
            res = (HttpResponse) parser.parse();
        } catch (IOException e) {
            throw new RuntimeException(e);
        } catch (HttpException e) {
            e.printStackTrace();
            return;
        }

        String uid = res.getFirstHeader("Request-Id").getValue();
        Request request = requestCache.getIfPresent(uid);
        if (request == null) {
            debug("Unknown request ID " + uid);
            return;
        }

        if (request.isSeen()) {
            debug("Already seen request ID " + uid);
            return;
        }

        request.seen();
        int statusCode = res.getStatusLine().getStatusCode();
        if (statusCode != 200) {
            debug("got status " + statusCode + ": " + res.getStatusLine().getReasonPhrase());
            // TODO handle error
            return;
        }

        // TODO handle success
        try {
            ByteArrayOutputStream byteBuffer = new ByteArrayOutputStream();
            readIntoByteBuffer(byteBuffer, buffer);

            debug("Received sha1 @" + request.start + " is " + sha1sum(byteBuffer.toByteArray()));
            if (request.method.equals("GET")) {
                VfsTransfer transfer = transferCache.getIfPresent(request.url);
                if (transfer == null) {
                    debug("Transfer expired for url " + request.url);
                    return;
                }
                transfer.chunkReceived(request, byteBuffer.toByteArray());
                if (transfer.isDone()) {
                    //Log.e( TAG, "onIncomingResponse: isDone");
                    debug("Transfer complete for " + request.url);
                    String filename = transfer.closeFile();
                    Uri vfsUri = IocVfs.vfsUri(filename);

                    if (transfer.checkSum()) {
                        //Log.e( TAG, "onIncomingResponse: writing");
                        if (mDataListener != null)
                            mDataListener.onTransferComplete(
                                false,
                                null,
                                mChatSession.getParticipant().getAddress().getAddress(),
                                transfer.url, transfer.type, vfsUri.toString());
                    } else {
                        if (mDataListener != null)
                            mDataListener.onTransferFailed(
                                false,
                                null,
                                mChatSession.getParticipant().getAddress().getAddress(),
                                transfer.url, "checksum");
                        debug( "Wrong checksum for file");
                    }
                } else {
                    // More chunks outstanding: report progress and keep requesting.
                    if (mDataListener != null)
                        mDataListener.onTransferProgress(true, null,
                            mChatSession.getParticipant().getAddress().getAddress(),
                            transfer.url,
                            ((float)transfer.chunksReceived) / transfer.chunks);
                    transfer.perform();
                    debug("Progress " + transfer.chunksReceived + " / " + transfer.chunks);
                }
            }
        } catch (IOException e) {
            debug("Could not read line from response");
        } catch (RemoteException e) {
            debug("Could not read remote exception");
        }
    }

    /** Extracts and sanitizes the final path segment of an OTRDATA URL. */
    private String getFilenameFromUrl(String url) {
        String[] path = url.split("/");
        String sanitizedPath = SystemServices.sanitize(path[path.length - 1]);
        return sanitizedPath;
    }

    /**
    private File writeDataToStorage (String url, byte[] data)
    {
        debug( "writeDataToStorage:" + url + " " + data.length);

        String[] path = url.split("/");
        String sanitizedPath = SystemServices.sanitize(path[path.length - 1]);

        File fileDownloadsDir = new File(Environment.DIRECTORY_DOWNLOADS);
        fileDownloadsDir.mkdirs();

        info.guardianproject.iocipher.File file = new info.guardianproject.iocipher.File(fileDownloadsDir, sanitizedPath);
        debug( "writeDataToStorage:" + file.getAbsolutePath() );

        try {
            OutputStream output = (new info.guardianproject.iocipher.FileOutputStream(file));
            output.write(data);
            output.flush();
            output.close();
            return file;
        } catch (IOException e) {
            OtrDebugLogger.log("error writing file", e);
            return null;
        }
    }*/

    /**
     * Offers a local file to the peer: computes its length and SHA-1, builds
     * the in-band URL from the sanitized filename, and sends an OFFER request.
     * Rejects files larger than MAX_TRANSFER_LENGTH.
     */
    @Override
    public void offerData(String id, Address us, String localUri, Map<String, String> headers) throws IOException {
        // TODO stash localUri and intended recipient
        long length = new java.io.File(localUri).length();
        if (length > MAX_TRANSFER_LENGTH) {
            throw new IOException("Length too large: " + length);
        }
        if (headers == null)
            headers = Maps.newHashMap();
        headers.put("File-Length", String.valueOf(length));

        try
        {
            java.io.FileInputStream is = new java.io.FileInputStream(localUri);
            headers.put("File-Hash-SHA1", sha1sum(is));
            is.close();

            String[] paths = localUri.split("/");
            String url = URI_PREFIX_OTR_IN_BAND + SystemServices.sanitize(paths[paths.length - 1]);

            Request request = new Request("OFFER", us, url, headers);
            offerCache.put(url, new Offer(id, localUri, request));
            sendRequest(request);
        }
        catch (IOException e)
        {
            Log.e(ImApp.LOG_TAG,"error opening file",e);
        }
    }

    /** Sends a GET for bytes [start, end] of url and returns the tracked request. */
    public Request performGetData(Address us, String url, Map<String, String> headers, int start, int end) {
        String rangeSpec = "bytes=" + start + "-" + end;
        headers.put("Range", rangeSpec);
        Request request = new Request("GET", us, url, start, end, headers, EMPTY_BODY);
        sendRequest(request);
        return request;
    }

    /** An outgoing offer: the local file URI plus the OFFER request that announced it. */
    static class Offer {
        private String mId;
        private String mUri;
        private Request request;

        public Offer(String id, String uri, Request request) {
            this.mId = id;
            this.mUri = uri;
            this.request = request;
        }

        public String getUri() {
            return mUri;
        }

        public String getId() {
            return mId;
        }

        public Request getRequest() {
            return request;
        }

        public void seen() {
            request.seen();
        }
    }

    /**
     * A pending outgoing request (OFFER or GET). "seen" flips once a response
     * for its Request-Id arrives, so retries and duplicates can be skipped.
     */
    static class Request {
        public Request(String method, Address us, String url, int start, int end, Map<String, String> headers, byte[] body) {
            this.method = method;
            this.url = url;
            this.start = start;
            this.end = end;
            this.us = us;
            this.headers = headers;
            this.body = body;
        }

        public Request(String method, Address us, String url, Map<String, String> headers) {
            this(method, us, url, -1, -1, headers, null);
        }

        public String method;
        public String url;
        public int start;   // first byte index of the requested range (-1 for OFFER)
        public int end;     // last byte index of the requested range (-1 for OFFER)
        public byte[] data; // NOTE(review): never assigned in this class
        public boolean seen = false;
        public Address us;
        public Map<String, String> headers;
        public byte[] body; // NOTE(review): null for OFFER requests — confirm the output buffer tolerates null

        public boolean isSeen() {
            return seen;
        }

        public void seen() {
            seen = true;
        }
    }

    /**
     * Tracks an incoming transfer: issues up to MAX_OUTSTANDING chunk GETs at a
     * time and assembles the results in an in-memory buffer. Non-static inner
     * class: it calls the outer {@link #performGetData}.
     */
    public class Transfer {
        public final String TAG = Transfer.class.getSimpleName();
        public String url;
        public String type;
        public int chunks = 0;
        public int chunksReceived = 0;
        private int length = 0;
        private int current = 0;
        private Address us;
        protected Set<Request> outstanding;
        private byte[] buffer;
        protected String sum; // expected SHA-1 from the OFFER

        public Transfer(String url, String type, int length, Address us, String sum) {
            this.url = url;
            this.type = type;
            this.length = length;
            this.us = us;
            this.sum = sum;

            //Log.e(TAG, "url:"+url + " type:"+ type + " length:"+length) ;

            if (length > MAX_TRANSFER_LENGTH || length <= 0) {
                throw new RuntimeException("Invalid transfer size " + length);
            }
            // Ceiling division: number of MAX_CHUNK_LENGTH-sized chunks needed.
            chunks = ((length - 1) / MAX_CHUNK_LENGTH) + 1;
            buffer = new byte[length];
            outstanding = Sets.newHashSet();
        }

        public boolean checkSum() {
            return sum.equals(sha1sum(buffer));
        }

        /**
         * Requests more chunks until MAX_OUTSTANDING are in flight.
         * Returns false once the whole length has been requested.
         */
        public boolean perform() {
            // TODO global throttle rather than this local hack
            while (outstanding.size() < MAX_OUTSTANDING) {
                if (current >= length)
                    return false;
                int end = current + MAX_CHUNK_LENGTH - 1;
                if (end >= length) {
                    end = length - 1;
                }
                Map<String, String> headers = Maps.newHashMap();
                Request request= performGetData(us, url, headers, current, end);
                outstanding.add(request);
                current = end + 1;
            }
            return true;
        }

        public boolean isDone() {
            //Log.e( TAG, "isDone:" + chunksReceived + " " + chunks);
            return chunksReceived == chunks;
        }

        public void chunkReceived(Request request, byte[] bs) {
            //Log.e( TAG, "chunkReceived:" + bs.length);
            chunksReceived++;
            System.arraycopy(bs, 0, buffer, request.start, bs.length);
            outstanding.remove(request);
        }

        public String getSum() {
            return sum;
        }
    }

    /**
     * Transfer variant that writes chunks straight into an IOCipher
     * {@link RandomAccessFile} under the VFS Downloads directory instead of
     * buffering the whole file in memory.
     */
    public class VfsTransfer extends Transfer {
        String localFilename;
        private RandomAccessFile raf;

        public VfsTransfer(String url, String type, int length, Address us, String sum) throws FileNotFoundException {
            super(url, type, length, us, sum);
        }

        @Override
        public void chunkReceived(Request request, byte[] bs) {
            debug( "chunkReceived: start: :" + request.start + " length " + bs.length) ;
            chunksReceived++;
            try {
                // Chunks may arrive out of order; seek to the chunk's offset.
                raf.seek( request.start );
                raf.write(bs) ;
            } catch (IOException e) {
                e.printStackTrace();
            }
            outstanding.remove(request);
        }

        @Override
        public boolean checkSum() {
            try {
                File file = new File(localFilename);
                return sum.equals( checkSum(file.getAbsolutePath()) );
            } catch (IOException e) {
                debug("checksum IOException");
                return false;
            }
        }

        @Override
        public boolean perform() {
            boolean result = super.perform();
            try {
                // Lazily open the destination file on first perform().
                if (raf == null) {
                    raf = openFile(url);
                }
            } catch (FileNotFoundException e) {
                e.printStackTrace();
                return false;
            }
            return result;
        }

        private RandomAccessFile openFile(String url) throws FileNotFoundException {
            debug( "openFile: url " + url) ;
            String filename = getFilenameFromUrl(url);
            localFilename = getLocalFilename(filename);
            debug( "openFile: localFilename " + localFilename) ;
            info.guardianproject.iocipher.RandomAccessFile ras = new info.guardianproject.iocipher.RandomAccessFile(localFilename, "rw");
            return ras;
        }

        /** Finds a non-colliding local name by appending "(n)" until unused. */
        private String getLocalFilename(String filename) {
            int count = 0 ;
            String localFilename;
            File file;
            do {
                localFilename = getLocalFilename(filename, count++);
                file = new File(localFilename);
            } while(file.exists());
            return localFilename;
        }

        private String getLocalFilename(String filename, int count) {
            String root = "/" + Environment.DIRECTORY_DOWNLOADS + "/";
            if (count == 0 ) {
                return root + filename ;
            }
            int lastDot = filename.lastIndexOf(".");
            String name = filename.substring(0,lastDot);
            String ext = filename.substring(lastDot);
            return root + name + "(" + count + ")" + ext;
        }

        /**
         * Closes the destination file and returns its canonical path.
         * NOTE(review): the unconditional `if(true) return` below disables the
         * ".tmp" rename logic that follows — confirm that is intentional.
         */
        public String closeFile() throws IOException {
            //Log.e(TAG, "closeFile") ;
            raf.close();
            File file = new File(localFilename);
            String newPath = file.getCanonicalPath();
            if(true)
                return newPath;
            newPath = newPath.substring(0,newPath.length()-4); // remove the .tmp
            //Log.e(TAG, "vfsCloseFile: rename " + newPath) ;
            File newPathFile = new File(newPath);
            boolean success = file.renameTo(newPathFile);
            if (!success) {
                throw new IOException("Rename error " + newPath );
            }
            return newPath;
        }

        private String checkSum(String filename) throws IOException {
            FileInputStream fis = new FileInputStream(new File(filename));
            String sum = sha1sum(fis);
            fis.close();
            return sum;
        }
    }

    // Bounded caches keyed by URL (offers, transfers) and Request-Id (requests).
    Cache<String, Offer> offerCache = CacheBuilder.newBuilder().maximumSize(100).build();
    Cache<String, Request> requestCache = CacheBuilder.newBuilder().maximumSize(100).build();
    Cache<String, VfsTransfer> transferCache = CacheBuilder.newBuilder().maximumSize(100).build();

    /**
     * Serializes a request with a fresh Request-Id, records it in requestCache
     * for response matching, and sends it as an OTR data packet.
     */
    private void sendRequest(Request request) {
        MemorySessionOutputBuffer outBuf = new MemorySessionOutputBuffer();
        HttpMessageWriter writer = new HttpRequestWriter(outBuf, lineFormatter, params);
        HttpMessage req = new BasicHttpRequest(request.method, request.url, PROTOCOL_VERSION);
        String uid = UUID.randomUUID().toString();
        req.addHeader("Request-Id", uid);
        if (request.headers != null) {
            for (Entry<String, String> entry : request.headers.entrySet()) {
                req.addHeader(entry.getKey(), entry.getValue());
            }
        }
        try {
            writer.write(req);
            outBuf.write(request.body);
            outBuf.flush();
        } catch (IOException e) {
            throw new RuntimeException(e);
        } catch (HttpException e) {
            throw new RuntimeException(e);
        }
        byte[] data = outBuf.getOutput();
        Message message = new Message("");
        message.setFrom(request.us);
        if (req.containsHeader("Range"))
            debug("send request " + request.method + " " + request.url + " " + req.getFirstHeader("Range"));
        else
            debug("send request " + request.method + " " + request.url);
        requestCache.put(uid, request);
        mChatSession.sendDataAsync(message, false, data);
    }

    private static String hexChr(int b) {
        return Integer.toHexString(b & 0xF);
    }

    private static String toHex(int b) {
        return hexChr((b & 0xF0) >> 4) + hexChr(b & 0x0F);
    }

    /** Hex-encoded SHA-1 of a byte array. */
    private String sha1sum(byte[] bytes) {
        MessageDigest digest;
        try {
            digest = MessageDigest.getInstance("SHA1");
        } catch (NoSuchAlgorithmException e) {
            throw new RuntimeException(e);
        }
        digest.update(bytes, 0, bytes.length);
        byte[] sha1sum = digest.digest();
        String display = "";
        for(byte b : sha1sum)
            display += toHex(b);
        return display;
    }

    /** Hex-encoded SHA-1 of a stream; returns null on any failure. */
    private String sha1sum(java.io.InputStream is) {
        MessageDigest digest;

        try {
            digest = MessageDigest.getInstance("SHA1");
            DigestInputStream dig = new DigestInputStream(is, digest);
            IOUtils.copy( dig, new NullOutputStream() );
            byte[] sha1sum = digest.digest();
            String display = "";
            for(byte b : sha1sum)
                display += toHex(b);
            return display;
        } catch (Exception npe) {
            Log.e(ImApp.LOG_TAG,"unable to hash file",npe);
            return null;
        }
    }

    /** Hex-encoded SHA-1 of a file stream, read in 1 KiB blocks. */
    private String sha1sum(java.io.FileInputStream fis) throws IOException {
        MessageDigest digest;
        try {
            digest = MessageDigest.getInstance("SHA1");
        } catch (NoSuchAlgorithmException e) {
            throw new RuntimeException(e);
        }
        int read;
        int SIZE = 1024;
        byte[] bytes = new byte[SIZE];
        while ((read = fis.read(bytes)) != -1) {
            digest.update(bytes, 0, read);
        }
        byte[] sha1sum = digest.digest();
        String display = "";
        for(byte b : sha1sum)
            display += toHex(b);
        return display;
    }

    /** Debug logging, gated on the global debug flag. */
    private void debug (String msg) {
        if (Debug.DEBUG_ENABLED)
            Log.d(ImApp.LOG_TAG,msg);
    }
}
package info.tregmine.listeners;

import org.bukkit.*;
import org.bukkit.block.Block;
import org.bukkit.event.*;
import org.bukkit.event.player.*;
import org.bukkit.inventory.ItemStack;

import info.tregmine.Tregmine;
import info.tregmine.api.*;
import info.tregmine.api.returns.BooleanStringReturn;

/**
 * Bukkit listener that turns an arm swing while holding a compass into a
 * teleport to the block the player is looking at. Players whose rank allows
 * the "enhanced" compass get an air-pocket search above the target; regular
 * compass users teleport per the configured {@link CompassMode}.
 */
public class CompassListener implements Listener
{
    /** Teleport placement strategy for the regular (non-enhanced) compass. */
    public enum CompassMode {
        OnTop,      // land on the highest block at the target column
        Precision;  // land directly on top of the targeted block
    }

    private Tregmine plugin;
    private CompassMode mode = CompassMode.Precision;

    public CompassListener(Tregmine instance)
    {
        this.plugin = instance;
    }

    /**
     * Handles arm-swing animations: if the player is holding a compass and is
     * permitted to use one here, teleports them toward their line-of-sight
     * target (keeping their current pitch/yaw).
     */
    @SuppressWarnings("deprecation")
    @EventHandler
    public void onPlayerAnimation(PlayerAnimationEvent event)
    {
        if (event.getAnimationType() != PlayerAnimationType.ARM_SWING) {
            return;
        }

        TregminePlayer player = plugin.getPlayer(event.getPlayer());
        ItemStack heldItem = player.getItemInHand();
        if (heldItem.getType() != Material.COMPASS) {
            return;
        }

        World world = player.getWorld();
        // Compass teleportation is blocked in the ruleless world unless the
        // rank allows cross-world teleports.
        if (plugin.getRulelessWorld().getName().equalsIgnoreCase(world.getName()) &&
                !player.getRank().canTeleportBetweenWorlds()) {
            player.sendMessage(ChatColor.RED + "You can not use a compass in this world!");
            return;
        }

        if (player.getRank().canUseEnhancedCompass()) {
            float pitch = event.getPlayer().getLocation().getPitch();
            float yaw = event.getPlayer().getLocation().getYaw();

            TargetBlock targetCalc = new TargetBlock(event.getPlayer());
            Block target = targetCalc.getTargetBlock();
            if (target != null) {
                // Scan up to 100 blocks upward for a two-block air pocket
                // (feet + head) so the player is not teleported into a wall.
                for (int i=0; i<100; i++) {
                    int landingType = world.getBlockAt(target.getX(), target.getY() + i, target.getZ()).getTypeId();
                    int landingAbove = world.getBlockAt(target.getX(), target.getY() + i + 1, target.getZ()).getTypeId();
                    if (landingType == 0 && landingAbove == 0) {
                        Location loc = target.getLocation();
                        // Center on the block and preserve view direction.
                        loc.setX(loc.getX() + .5);
                        loc.setZ(loc.getZ() + .5);
                        loc.setY(loc.getY() + i);
                        loc.setPitch(pitch);
                        loc.setYaw(yaw);
                        if (loc.getY() < 255) {
                            player.teleportWithHorse(loc);
                        }
                        break;
                    }
                }
            }
        }
        else if (player.getRank().canUseCompass()) {
            Block target = player.getDelegate().getTargetBlock(null, 300);
            Block b1 = world.getBlockAt(new Location(player.getWorld(), target.getX(), target.getY() + 1, target.getZ()));
            Block b2 = world.getBlockAt(new Location(player.getWorld(), target.getX(), target.getY() + 2, target.getZ()));

            // Zone/permission check for the destination.
            BooleanStringReturn returnValue = player.canBeHere(target.getLocation());
            if (!returnValue.getBoolean()) {
                player.sendMessage(returnValue.getString());
                return;
            }

            if (mode == CompassMode.OnTop) {
                int top = world.getHighestBlockYAt(target.getLocation());
                Location loc = new Location(player.getWorld(),
                        target.getX() + 0.5,
                        top,
                        target.getZ() + 0.5,
                        player.getLocation().getYaw(),
                        player.getLocation().getPitch());
                player.teleportWithHorse(loc);
            }
            else if (mode == CompassMode.Precision) {
                // Only teleport when the two blocks above the target are
                // passable (air, or a torch at head height), or at the old
                // world-height ceiling (y == 127).
                if ((b1.getType() == Material.AIR &&
                        (b2.getType() == Material.AIR || b2.getType() == Material.TORCH)) ||
                        target.getY() == 127) {
                    Location loc = new Location(player.getWorld(),
                            target.getX() + 0.5,
                            target.getY() + 1,
                            target.getZ() + 0.5,
                            player.getLocation().getYaw(),
                            player.getLocation().getPitch());
                    player.teleportWithHorse(loc);
                } else {
                    player.sendMessage(ChatColor.RED + "I think its a stupid idea to teleport in to a wall");
                }
            }
        }
    }
}
package org.dita.dost.ant; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.annotations.VisibleForTesting; import org.apache.commons.io.FileUtils; import org.apache.commons.io.IOUtils; import org.apache.commons.io.output.NullOutputStream; import org.apache.tools.ant.BuildException; import org.apache.tools.ant.Project; import org.apache.tools.ant.Task; import org.apache.tools.ant.taskdefs.Expand; import org.apache.tools.ant.taskdefs.Get; import org.dita.dost.platform.Plugins; import org.dita.dost.platform.Registry; import org.dita.dost.platform.Registry.Dependency; import org.dita.dost.platform.SemVer; import org.dita.dost.util.Configuration; import org.w3c.dom.Document; import org.xml.sax.SAXException; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import java.io.*; import java.net.MalformedURLException; import java.net.URI; import java.net.URISyntaxException; import java.net.URL; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.security.DigestInputStream; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; import java.util.Arrays; import java.util.Comparator; import java.util.List; import java.util.Optional; import java.util.stream.Collectors; public final class PluginInstallTask extends Task { private List<String> registries; private File tempDir; private final ObjectMapper mapper = new ObjectMapper(); private List<String> installedPlugins; private Path pluginFile; private URL pluginUrl; private String pluginName; private SemVer pluginVersion; @Override public void init() { registries = Arrays.stream(Configuration.configuration.get("registry").trim().split("\\s+")) .map(registry -> registry.endsWith("/") ? 
registry : (registry + "/")) .collect(Collectors.toList()); try { tempDir = Files.createTempDirectory(null).toFile(); } catch (IOException e) { throw new BuildException("Failed to create temporary directory: " + e.getMessage(), e); } installedPlugins = Plugins.getInstalledPlugins(); } private void cleanUp() { if (tempDir != null) { try { FileUtils.deleteDirectory(tempDir); } catch (IOException e) { throw new BuildException(e); } } } @Override public void execute() throws BuildException { if (pluginFile == null && pluginUrl == null && pluginName == null) { throw new BuildException(new IllegalStateException("pluginName argument not set")); } try { final String name; final File tempPluginDir; if (pluginFile != null && Files.exists(pluginFile)) { tempPluginDir = unzip(pluginFile.toFile()); name = getPluginName(tempPluginDir); } else if (pluginUrl != null) { final File tempFile = get(pluginUrl, null); tempPluginDir = unzip(tempFile); name = getPluginName(tempPluginDir); } else { final Registry plugin = readRegistry(this.pluginName, pluginVersion); final File tempFile = get(plugin.url, plugin.cksum); tempPluginDir = unzip(tempFile); name = plugin.name; } final File pluginDir = getPluginDir(name); if (pluginDir.exists()) { throw new BuildException(new IllegalStateException(String.format("Plug-in %s already installed: %s", name, pluginDir))); } Files.move(tempPluginDir.toPath(), pluginDir.toPath()); } catch (IOException e) { throw new BuildException(e.getMessage(), e); } finally { cleanUp(); } } private String getFileHash(final File file) { try (DigestInputStream digestInputStream = new DigestInputStream(new BufferedInputStream( new FileInputStream(file)), MessageDigest.getInstance("SHA-256"))) { IOUtils.copy(digestInputStream, new NullOutputStream()); final MessageDigest digest = digestInputStream.getMessageDigest(); final byte[] sha256 = digest.digest(); return printHexBinary(sha256); } catch (NoSuchAlgorithmException e) { throw new IllegalArgumentException(e); } catch 
(IOException e) { throw new BuildException("Failed to calculate file checksum: " + e.getMessage(), e); } } private String printHexBinary(final byte[] md5) { final StringBuilder sb = new StringBuilder(); for (byte b : md5) { sb.append(String.format("%02X", b)); } return sb.toString().toLowerCase(); } private String getPluginName(final File pluginDir) { final File config = new File(pluginDir, "plugin.xml"); try { final Document doc = DocumentBuilderFactory.newInstance().newDocumentBuilder().parse(config); return doc.getDocumentElement().getAttribute("id"); } catch (SAXException | IOException | ParserConfigurationException e) { throw new BuildException("Failed to read plugin name: " + e.getMessage(), e); } } private File getPluginDir(final String id) { return Paths.get(getProject().getProperty("dita.dir"), "plugins", id).toFile(); } private Registry readRegistry(final String name, final SemVer version) { Registry res = null; for (final String registry : registries) { final URI registryUrl = URI.create(registry + name + ".json"); log(String.format("Read registry %s", registry), Project.MSG_DEBUG); try (BufferedInputStream in = new BufferedInputStream(registryUrl.toURL().openStream())) { log("Parse registry", Project.MSG_DEBUG); final List<Registry> regs = Arrays.asList(mapper.readValue(in, Registry[].class)); final Optional<Registry> reg = findPlugin(regs, version); if (reg.isPresent()) { final Registry plugin = reg.get(); log(String.format("Plugin found at %s@%s", registryUrl, plugin.vers), Project.MSG_INFO); res = plugin; break; } } catch (MalformedURLException e) { log(String.format("Invalid registry URL %s: %s", registryUrl, e.getMessage()), e, Project.MSG_ERR); } catch (FileNotFoundException e) { log(String.format("Registry configuration %s not found", registryUrl), e, Project.MSG_DEBUG); } catch (IOException e) { log(String.format("Failed to read registry configuration %s: %s", registryUrl, e.getMessage()), e, Project.MSG_ERR); } } if (res == null) { throw new 
BuildException("Unable to find plugin " + pluginFile); } res.deps.stream() .filter(dep -> !installedPlugins.contains(dep.name)) .forEach(dep -> log(String.format("Dependency %s not installed", dep.name), Project.MSG_WARN)); return res; } private File get(final URL url, final String expectedChecksum) { final File tempPluginFile = new File(tempDir, "plugin.zip"); final Get get = new Get(); get.setProject(getProject()); get.setTaskName("get"); get.setSrc(url); get.setDest(tempPluginFile); get.setIgnoreErrors(false); get.setVerbose(false); get.execute(); if (expectedChecksum != null) { final String checksum = getFileHash(tempPluginFile); if (!checksum.equalsIgnoreCase(expectedChecksum)) { throw new BuildException(new IllegalArgumentException(String.format("Downloaded plugin file checksum %s does not match expected value %s", checksum, expectedChecksum))); } } return tempPluginFile; } private File unzip(final File input) { final File tempPluginDir = new File(tempDir, "plugin"); final Expand unzip = new Expand(); unzip.setProject(getProject()); unzip.setTaskName("unzip"); unzip.setSrc(input); unzip.setDest(tempPluginDir); unzip.execute(); return findBaseDir(tempPluginDir); } private File findBaseDir(final File tempPluginDir) { final File config = new File(tempPluginDir, "plugin.xml"); if (config.exists()) { return tempPluginDir; } else { for (final File dir : tempPluginDir.listFiles(File::isDirectory)) { final File res = findBaseDir(dir); if (res != null) { return res; } } return null; } } private Optional<Registry> findPlugin(final List<Registry> regs, final SemVer version) { if (version == null) { return regs.stream() .filter(this::matchingPlatformVersion) .max(Comparator.comparing(o -> o.vers)); } else { return regs.stream() .filter(this::matchingPlatformVersion) .filter(reg -> reg.vers.equals(version)) .findFirst(); } } @VisibleForTesting boolean matchingPlatformVersion(final Registry reg) { final Optional<Dependency> platformDependency = reg.deps.stream() 
.filter(dep -> dep.name.equals("org.dita.base")) .findFirst(); if (platformDependency.isPresent()) { final SemVer platform = new SemVer(Configuration.configuration.get("otversion")); final Dependency dep = platformDependency.get(); return dep.req.contains(platform); } else { return true; } } public void setPluginFile(final String pluginFile) { this.pluginFile = Paths.get(pluginFile); try { final URI uri = new URI(pluginFile); if (uri.isAbsolute()) { this.pluginUrl = uri.toURL(); } } catch (MalformedURLException | URISyntaxException e) { // Ignore } if (pluginFile.contains("@")) { final String[] tokens = pluginFile.split("@"); pluginName = tokens[0]; pluginVersion = new SemVer(tokens[1]); } else { pluginName = pluginFile; pluginVersion = null; } } }
package org.folio.rest.impl;

import io.vertx.core.AsyncResult;
import io.vertx.core.Context;
import io.vertx.core.Handler;
import org.folio.cataloging.business.codetable.ValueLabelElement;
import org.folio.cataloging.log.Log;
import org.folio.cataloging.log.MessageCatalog;
import org.folio.rest.jaxrs.model.LogicalViewCollection;
import org.folio.rest.jaxrs.model.View;
import org.folio.rest.jaxrs.resource.LogicalViewsResource;

import javax.ws.rs.core.Response;
import java.util.Map;
import java.util.function.Function;

import static java.util.stream.Collectors.toList;
import static org.folio.cataloging.integration.CatalogingHelper.doGet;

/**
 * Logical views RESTful APIs.
 *
 * @author agazzarini
 * @since 1.0
 */
public class LogicalViewsAPI implements LogicalViewsResource {

    protected final Log logger = new Log(LogicalViewsAPI.class);

    /**
     * Converts an existing value object (a logical view) into its OKAPI resource.
     *
     * @param source the value/label pair coming from the cataloging layer.
     * @return the corresponding {@link View} resource.
     */
    private View toView(final ValueLabelElement source) {
        final View logicalView = new View();
        logicalView.setCode(source.getValue());
        logicalView.setLongDescription(source.getLabel());
        return logicalView;
    }

    @Override
    public void getLogicalViews(
            final String lang,
            final Map<String, String> okapiHeaders,
            final Handler<AsyncResult<Response>> resultHandler,
            final Context vertxContext) throws Exception {
        doGet((storageService, future) -> {
            try {
                final LogicalViewCollection container = new LogicalViewCollection();
                container.setViews(
                        storageService.getLogicalViews(lang)
                                .stream()
                                .map(this::toView)
                                .collect(toList()));
                return container;
            } catch (final Exception exception) {
                logger.error(MessageCatalog._00010_DATA_ACCESS_FAILURE, exception);
                // NOTE(review): null presumably signals failure to the doGet helper — confirm.
                return null;
            }
        }, resultHandler, okapiHeaders, vertxContext);
    }

    @Override
    public void postLogicalViews(String lang, View entity, Map<String, String> okapiHeaders,
            Handler<AsyncResult<Response>> asyncResultHandler, Context vertxContext) throws Exception {
        // Creation is not supported by this endpoint.
        throw new IllegalArgumentException();
    }
}
// samskivert library - useful routines for java programs
// This library is free software; you can redistribute it and/or modify it
// (at your option) any later version.
// This library is distributed in the hope that it will be useful,
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// You should have received a copy of the GNU Lesser General Public
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

package com.samskivert.util;

import java.util.AbstractSet;
import java.util.Collection;
import java.util.Iterator;

/**
 * A skeletal {@link IntSet} implementation.<p>
 *
 * Concrete subclasses need only supply <tt>interator</tt> and <tt>size</tt>, although
 * overriding <tt>contains</tt> is strongly recommended for performance.<p>
 *
 * To produce a modifiable IntSet, subclasses must additionally override the primitive
 * <tt>add</tt> and <tt>remove</tt> methods, which otherwise throw
 * <tt>UnsupportedOperationException</tt>.<p>
 */
public abstract class AbstractIntSet extends AbstractSet<Integer>
    implements IntSet
{
    /**
     * {@inheritDoc}
     *
     * <p>This implementation performs a linear scan over the interator, so subclasses
     * with faster lookup should override it.
     */
    // from IntSet
    public boolean contains (int value)
    {
        Interator it = interator();
        while (it.hasNext()) {
            if (it.nextInt() == value) {
                return true;
            }
        }
        return false;
    }

    // from IntSet
    public boolean add (int value)
    {
        throw new UnsupportedOperationException();
    }

    // from IntSet
    public boolean remove (int value)
    {
        throw new UnsupportedOperationException();
    }

    /**
     * {@inheritDoc}
     *
     * <p>This implementation fills an array of {@link #size} elements from the interator.
     */
    // from IntSet
    public int[] toIntArray ()
    {
        int[] values = new int[size()];
        int idx = 0;
        Interator it = interator();
        while (it.hasNext()) {
            values[idx++] = it.nextInt();
        }
        return values;
    }

    @Override // from AbstractSet<Integer>
    public Iterator<Integer> iterator ()
    {
        return interator();
    }

    @Override // from AbstractSet<Integer>
    public boolean contains (Object o)
    {
        // deliberately allow an NPE or CCE if a non-Integer is supplied
        return contains(((Integer)o).intValue());
    }

    @Override // from AbstractSet<Integer>
    public boolean add (Integer i)
    {
        // will NPE on a null argument
        return add(i.intValue());
    }

    @Override // from AbstractSet<Integer>
    public boolean remove (Object o)
    {
        // deliberately allow an NPE or CCE if a non-Integer is supplied
        return remove(((Integer)o).intValue());
    }

    @Override // from AbstractSet<Integer>
    public boolean equals (Object o)
    {
        if (o == this) {
            return true;
        }
        if (o instanceof IntSet) {
            IntSet other = (IntSet)o;
            return (size() == other.size()) && containsAll(other);
        }
        return super.equals(o);
    }

    @Override // from AbstractSet<Integer>
    public int hashCode ()
    {
        // sum of elements, matching AbstractSet's contract for equal sets
        int code = 0;
        Interator it = interator();
        while (it.hasNext()) {
            code += it.nextInt();
        }
        return code;
    }

    @Override // from AbstractSet<Integer>
    public String toString ()
    {
        StringBuilder buf = new StringBuilder("[");
        String sep = "";
        Interator it = interator();
        while (it.hasNext()) {
            buf.append(sep).append(it.nextInt());
            sep = ", ";
        }
        return buf.append(']').toString();
    }

    @Override // from AbstractSet<Integer>
    public boolean containsAll (Collection<?> c)
    {
        if (c instanceof Interable) {
            Interator it = ((Interable)c).interator();
            while (it.hasNext()) {
                if (!contains(it.nextInt())) {
                    return false;
                }
            }
            return true;
        }
        return super.containsAll(c);
    }

    @Override // from AbstractSet<Integer>
    public boolean addAll (Collection<? extends Integer> c)
    {
        if (c instanceof Interable) {
            boolean changed = false;
            Interator it = ((Interable)c).interator();
            while (it.hasNext()) {
                changed = add(it.nextInt()) || changed;
            }
            return changed;
        }
        return super.addAll(c);
    }

    @Override // from AbstractSet<Integer>
    public boolean removeAll (Collection<?> c)
    {
        if (c instanceof Interable) {
            boolean changed = false;
            Interator it = ((Interable)c).interator();
            while (it.hasNext()) {
                changed = remove(it.nextInt()) || changed;
            }
            return changed;
        }
        return super.removeAll(c);
    }

    @Override // from AbstractSet<Integer>
    public boolean retainAll (Collection<?> c)
    {
        if (c instanceof IntSet) {
            IntSet keep = (IntSet)c;
            boolean changed = false;
            Interator it = interator();
            while (it.hasNext()) {
                if (!keep.contains(it.nextInt())) {
                    it.remove();
                    changed = true;
                }
            }
            return changed;
        }
        return super.retainAll(c);
    }
}
package org.hope6537.hadoop;

/**
 * Name constants for the supported Hadoop ecosystem components.
 *
 * <p>Constants-only holder; not meant to be instantiated.
 */
public final class HadoopConstants {

    /** Hadoop Distributed File System. */
    public static final String HDFS = "HDFS";

    /** HBase store. Value kept as "Hbase" for compatibility with existing consumers. */
    public static final String HBASE = "Hbase";

    /** Hive data warehouse. */
    public static final String HIVE = "Hive";

    /** Prevents instantiation of this constants holder. */
    private HadoopConstants() {
        throw new AssertionError("No instances");
    }
}
package com.yahoo.dtf.results;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.PrintStream;
import java.net.URI;
import java.text.ParseException;
import java.util.Date;
import java.util.Iterator;
import java.util.Set;
import java.util.Map.Entry;

import com.yahoo.dtf.DTFProperties;
import com.yahoo.dtf.actions.Action;
import com.yahoo.dtf.exception.ResultsException;
import com.yahoo.dtf.exception.StorageException;
import com.yahoo.dtf.util.TimeUtil;

/**
 * Results sink that writes DTF results as JUnit-style XML so they can be
 * consumed by tooling that understands JUnit reports.
 */
public class JUnitResults extends ResultsBase {

    private URI _uri = null;
    private PrintStream _xml = null;

    // True once a <testsuite> element has been emitted; used to synthesize a
    // suite wrapper for a test case that arrives without one.
    private boolean hasTestSuite = false;

    public JUnitResults(URI uri, boolean savelogs) {
        // NOTE(review): savelogs is accepted but logs are always saved — confirm intent.
        super(uri, true);
        _uri = uri;
    }

    /** Destination of the generated XML. */
    public URI getURI() {
        return _uri;
    }

    public void start() throws ResultsException {
        try {
            OutputStream os = Action.getStorageFactory().getOutputStream(_uri);
            _xml = new PrintStream(os);
        } catch (StorageException e) {
            throw new ResultsException("Unable to open output file.", e);
        }
    }

    public void stop() throws ResultsException {
        _xml.close();
        /*
         * now lets beautify the resulting XML.
         */
    }

    /** Emits the opening &lt;testsuite&gt; element with its summary attributes. */
    private void startTestSuite(Result result, PrintStream out)
            throws ResultsException {
        out.print("<testsuite");
        out.print(" name=\"dtf." + result.getName() + "\"");
        out.print(" classname=\"dtf." + result.getName() + "\"");
        out.print(" tests=\"" + result.getTotalTests() + "\"");
        try {
            out.print(" start=\"" + TimeUtil.dateStampToDateStamp(result.getStart()) + "\"");
            out.print(" stop=\"" + TimeUtil.dateStampToDateStamp(result.getStop()) + "\"");
        } catch (ParseException e) {
            throw new ResultsException("Error handling date.", e);
        }
        out.print(" time=\"" + result.getDurationInSeconds() + "\"");
        out.print(" timestamp=\"" + new Date(result.getStart()) + "\"");
        out.print(" passed=\"" + result.getNumPassed() + "\"");
        out.print(" failures=\"" + result.getNumFailed() + "\"");
        // "errors" mirrors the failure count; failures and errors are not distinguished.
        out.print(" errors=\"" + result.getNumFailed() + "\"");
        out.println(" disabled=\"" + result.getNumSkipped() + "\">");
    }

    /** Emits one &lt;property&gt; element per result property. */
    private void printProperties(Result result, PrintStream out) {
        Set<Entry<Object, Object>> entries = result.getProperties().entrySet();
        for (Entry<Object, Object> entry : entries) {
            out.println("<property name=\"" + entry.getKey()
                    + "\" value=\"" + entry.getValue() + "\" />");
        }
    }

    public void recordResult(Result result) throws ResultsException {
        if (result.isTestSuite()) {
            hasTestSuite = true;
            startTestSuite(result, _xml);
            printProperties(result, _xml);
            printResultNode(result, _xml);

            // recurse into the suite's children before closing the element
            Iterator results = result.getResults().iterator();
            while (results.hasNext()) {
                recordResult((Result) results.next());
            }

            emitSuiteLog(result);
            _xml.println("<system-err/>");
            _xml.println("</testsuite>");
        } else if (result.isTestCase()) {
            if (!hasTestSuite) {
                // synthesize an (empty) suite wrapper for a bare test case
                hasTestSuite = true;
                startTestSuite(result, _xml);
                _xml.println("</testsuite>");
            } else {
                /*
                 * Test case output should look like this:
                 *
                 * <testcase name="testAdd"
                 *           time="0.018"/>
                 */
                _xml.print("<testcase name=\"" + result.getName() + "\"");
                try {
                    _xml.print(" start=\"" + TimeUtil.dateStampToDateStamp(result.getStart()) + "\"");
                    _xml.print(" stop=\"" + TimeUtil.dateStampToDateStamp(result.getStop()) + "\"");
                } catch (ParseException e) {
                    throw new ResultsException("Error handling date.", e);
                }
                _xml.println(" time=\"" + result.getDurationInSeconds() + "\">");
                printProperties(result, _xml);
                printResultNode(result, _xml);
                _xml.println("</testcase>");
            }
        }
    }

    /**
     * Embeds the suite's log file (if any) as a CDATA &lt;system-out&gt; block,
     * or an empty &lt;system-out/&gt; element otherwise.
     */
    private void emitSuiteLog(Result result) throws ResultsException {
        String testlogfile =
                (String) result.getProperties().get(DTFProperties.DTF_TESTCASE_LOG);
        File log = null;
        if (testlogfile != null && (log = new File(testlogfile)).exists()) {
            _xml.println("<system-out><![CDATA[");
            try {
                BufferedReader reader = new BufferedReader(
                        new InputStreamReader(new FileInputStream(log)));
                try {
                    String line = null;
                    while ((line = reader.readLine()) != null) {
                        _xml.println(line);
                    }
                } finally {
                    reader.close();
                }
            } catch (FileNotFoundException e) {
                throw new ResultsException("Unable to read log file.", e);
            } catch (IOException e) {
                throw new ResultsException("Error reading log file.", e);
            }
            _xml.println("]]></system-out>");
        } else {
            _xml.println("<system-out/>");
        }
    }

    /** Emits a &lt;failure&gt; element when the result represents a failure. */
    private void printResultNode(Result result, PrintStream out) {
        if (!result.isFailResult()) {
            return;
        }
        out.print("<failure>");
        if (result.getOutput() != null) {
            out.print(result.getOutput());
        }
        out.println("</failure>");
    }
}
package org.inventivetalent.nbt;

import com.google.gson.JsonObject;
import lombok.EqualsAndHashCode;
import org.inventivetalent.nbt.stream.NBTOutputStream;

import java.io.DataOutputStream;
import java.io.IOException;
import java.lang.reflect.Field;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;

/**
 * NBT compound tag: a named map of child tags keyed by their names
 * (type id {@code TagID.TAG_COMPOUND}).
 */
@EqualsAndHashCode(callSuper = true)
public class CompoundTag extends NBTTag<Map<String, NBTTag>> implements Iterable<Map.Entry<String, NBTTag>> {

	private final Map<String, NBTTag> value;

	public CompoundTag() {
		super("");
		this.value = new HashMap<>();
	}

	public CompoundTag(Map<String, NBTTag> value) {
		super("");
		this.value = new HashMap<>(value);
	}

	public CompoundTag(String name) {
		super(name);
		this.value = new HashMap<>();
	}

	public CompoundTag(String name, Map<String, NBTTag> value) {
		super(name);
		this.value = new HashMap<>(value);
	}

	@Override
	public Map<String, NBTTag> getValue() {
		return value;
	}

	/**
	 * Merges the given entries into this compound.
	 * Note: existing entries not present in {@code value} are kept (no clear).
	 */
	@Override
	public void setValue(Map<String, NBTTag> value) {
		this.value.putAll(value);
	}

	/** Returns the child tag with the given name, or {@code null} if absent. */
	public NBTTag get(String name) {
		return value.get(name);
	}

	/** Stores a child tag under the given name, replacing any previous entry. */
	public void set(String name, NBTTag tag) {
		this.value.put(name, tag);
	}

	public void set(String name, byte b) {
		set(name, new ByteTag(name, b));
	}

	public void set(String name, short s) {
		set(name, new ShortTag(name, s));
	}

	public void set(String name, int i) {
		set(name, new IntTag(name, i));
	}

	public void set(String name, long l) {
		set(name, new LongTag(name, l));
	}

	public void set(String name, float f) {
		set(name, new FloatTag(name, f));
	}

	public void set(String name, double d) {
		set(name, new DoubleTag(name, d));
	}

	public void set(String name, String string) {
		set(name, new StringTag(name, string));
	}

	public void set(String name, byte[] b) {
		set(name, new ByteArrayTag(name, b));
	}

	public void set(String name, int[] i) {
		set(name, new IntArrayTag(name, i));
	}

	/** Booleans are stored as a byte tag (1 = true, 0 = false), per NBT convention. */
	public void set(String name, boolean b) {
		set(name, (byte) (b ? 1 : 0));
	}

	@Override
	public JsonObject asJson() {
		JsonObject jsonObject = new JsonObject();
		for (Map.Entry<String, NBTTag> entry : value.entrySet()) {
			jsonObject.add(entry.getKey(), entry.getValue().asJson());
		}
		return jsonObject;
	}

	@Override
	public void write(NBTOutputStream nbtOut, DataOutputStream out) throws IOException {
		for (NBTTag tag : value.values()) {
			nbtOut.writeTag(tag);
		}
		// a compound's payload is terminated by a TAG_End marker
		out.writeByte(TagID.TAG_END);
	}

	@Override
	public int getTypeId() {
		return TagID.TAG_COMPOUND;
	}

	@Override
	public String getTypeName() {
		// Fixed typo: was "TAG_Compund"; the standard NBT name is TAG_Compound.
		return "TAG_Compound";
	}

	@Override
	public Iterator<Map.Entry<String, NBTTag>> iterator() {
		return value.entrySet().iterator();
	}

	public String getNMSClass() {
		return "NBTTagCompound";
	}

	/**
	 * Populates this compound from an NMS NBTTagCompound by reflectively reading
	 * its internal "map" field and converting each child by type id.
	 */
	@Override
	public CompoundTag fromNMS(Object nms) throws ReflectiveOperationException {
		Class<?> clazz = NMS_CLASS_RESOLVER.resolve(getNMSClass());
		Class<?> nbtBaseClass = NMS_CLASS_RESOLVER.resolve("NBTBase");
		Field field = clazz.getDeclaredField("map");
		field.setAccessible(true);
		Map<String, Object> nmsMap = (Map<String, Object>) field.get(nms);
		for (Map.Entry<String, Object> nmsEntry : nmsMap.entrySet()) {
			byte typeId = (byte) nbtBaseClass.getMethod("getTypeId").invoke(nmsEntry.getValue());
			if (typeId == TagID.TAG_LIST) {
				set(nmsEntry.getKey(), new ListTag(typeId, nmsEntry.getKey()).fromNMS(nmsEntry.getValue()));
			} else if (typeId == TagID.TAG_STRING) {
				// "null" is only a placeholder; fromNMS overwrites it with the real value
				set(nmsEntry.getKey(), new StringTag(nmsEntry.getKey(), "null").fromNMS(nmsEntry.getValue()));
			} else {
				set(nmsEntry.getKey(), NBTTag.forType(typeId).newInstance().fromNMS(nmsEntry.getValue()));
			}
		}
		return this;
	}

	/**
	 * Builds a fresh NMS NBTTagCompound and fills its internal "map" field with
	 * the NMS conversions of this compound's children.
	 */
	@Override
	public Object toNMS() throws ReflectiveOperationException {
		Class<?> clazz = NMS_CLASS_RESOLVER.resolve(getNMSClass());
		Field field = clazz.getDeclaredField("map");
		field.setAccessible(true);
		Object nms = clazz.newInstance();
		// field.get returns the live map, so mutating it mutates the NMS tag directly
		Map map = (Map) field.get(nms);
		for (Map.Entry<String, NBTTag> entry : this) {
			map.put(entry.getKey(), entry.getValue().toNMS());
		}
		// Removed leftover debug System.out.println calls and the redundant
		// field.set(nms, map) (it re-assigned the same reference).
		return nms;
	}
}
package org.lantern.http; import java.io.File; import java.io.IOException; import java.net.InetAddress; import java.net.MalformedURLException; import java.net.URL; import java.util.Arrays; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Map; import javax.servlet.ServletException; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.apache.commons.io.FileUtils; import org.apache.commons.io.IOUtils; import org.apache.commons.lang.SystemUtils; import org.apache.commons.lang3.StringUtils; import org.codehaus.jackson.JsonParseException; import org.codehaus.jackson.map.JsonMappingException; import org.codehaus.jackson.map.ObjectMapper; import org.lantern.Censored; import org.lantern.ConnectivityChangedEvent; import org.lantern.JsonUtils; import org.lantern.LanternClientConstants; import org.lantern.LanternFeedback; import org.lantern.LanternUtils; import org.lantern.XmppHandler; import org.lantern.event.Events; import org.lantern.event.InvitesChangedEvent; import org.lantern.event.ResetEvent; import org.lantern.state.Connectivity; import org.lantern.state.InternalState; import org.lantern.state.InviteQueue; import org.lantern.state.JsonModelModifier; import org.lantern.state.LocationChangedEvent; import org.lantern.state.Modal; import org.lantern.state.Mode; import org.lantern.state.Model; import org.lantern.state.ModelIo; import org.lantern.state.ModelService; import org.lantern.state.Notification.MessageType; import org.lantern.state.Settings; import org.lantern.state.SyncPath; import org.lantern.util.Desktop; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.eventbus.Subscribe; import com.google.inject.Inject; import com.google.inject.Singleton; @Singleton public class InteractionServlet extends HttpServlet { private final InternalState internalState; // XXX DRY: these are also defined in 
lantern-ui/app/js/constants.js private enum Interaction { GET, GIVE, INVITE, CONTINUE, SETTINGS, CLOSE, RESET, SET, PROXIEDSITES, CANCEL, LANTERNFRIENDS, RETRY, REQUESTINVITE, CONTACT, ABOUT, ACCEPT, DECLINE, UNEXPECTEDSTATERESET, UNEXPECTEDSTATEREFRESH, URL, EXCEPTION } // modals the user can switch to from other modals private static final HashSet<Modal> switchModals = new HashSet<Modal>(); static { switchModals.add(Modal.about); switchModals.add(Modal.contact); switchModals.add(Modal.settings); switchModals.add(Modal.proxiedSites); switchModals.add(Modal.lanternFriends); } private final Logger log = LoggerFactory.getLogger(getClass()); /** * Generated serialization ID. */ private static final long serialVersionUID = -8820179746803371322L; private final ModelService modelService; private final Model model; private final ModelIo modelIo; private final XmppHandler xmppHandler; private final Censored censored; private final LanternFeedback lanternFeedback; private final InviteQueue inviteQueue; /* only open external urls to these hosts: */ private static final HashSet<String> allowedDomains = new HashSet<String>( Arrays.asList("google.com", "github.com", "getlantern.org")); @Inject public InteractionServlet(final Model model, final ModelService modelService, final InternalState internalState, final ModelIo modelIo, final XmppHandler xmppHandler, final Censored censored, final LanternFeedback lanternFeedback, final InviteQueue inviteQueue) { this.model = model; this.modelService = modelService; this.internalState = internalState; this.modelIo = modelIo; this.xmppHandler = xmppHandler; this.censored = censored; this.lanternFeedback = lanternFeedback; this.inviteQueue = inviteQueue; Events.register(this); } @Override protected void doGet(final HttpServletRequest req, final HttpServletResponse resp) throws ServletException, IOException { processRequest(req, resp); } @Override protected void doPost(final HttpServletRequest req, final HttpServletResponse resp) throws 
ServletException, IOException { processRequest(req, resp); } protected void processRequest(final HttpServletRequest req, final HttpServletResponse resp) { LanternUtils.addCSPHeader(resp); final String uri = req.getRequestURI(); log.debug("Received URI: {}", uri); final String interactionStr = StringUtils.substringAfterLast(uri, "/"); if (StringUtils.isBlank(interactionStr)) { log.debug("blank interaction"); HttpUtils.sendClientError(resp, "blank interaction"); return; } log.debug("Headers: "+HttpUtils.getRequestHeaders(req)); if (!"XMLHttpRequest".equals(req.getHeader("X-Requested-With"))) { log.debug("invalid X-Requested-With"); HttpUtils.sendClientError(resp, "invalid X-Requested-With"); return; } if (!model.getXsrfToken().equals(req.getHeader("X-XSRF-TOKEN"))) { log.debug("X-XSRF-TOKEN wrong: got {} expected {}", req.getHeader("X-XSRF-TOKEN"), model.getXsrfToken()); HttpUtils.sendClientError(resp, "invalid X-XSRF-TOKEN"); return; } final int cl = req.getContentLength(); String json = ""; if (cl > 0) { try { json = IOUtils.toString(req.getInputStream()); } catch (final IOException e) { log.error("Could not parse json?"); } } log.debug("Body: '"+json+"'"); final Interaction inter = Interaction.valueOf(interactionStr.toUpperCase()); if (inter == Interaction.CLOSE) { if (handleClose(json)) { return; } } if (inter == Interaction.URL) { final String url = JsonUtils.getValueFromJson("url", json); final URL url_; if (!StringUtils.startsWith(url, "http: !StringUtils.startsWith(url, "https: log.error("http(s) url expected, got {}", url); HttpUtils.sendClientError(resp, "http(s) urls only"); return; } try { url_ = new URL(url); } catch (MalformedURLException e) { log.error("invalid url: {}", url); HttpUtils.sendClientError(resp, "invalid url"); return; } final String host = url_.getHost(); final String[] hostParts = StringUtils.split(host, "."); final String domain = hostParts[hostParts.length-2] + "." 
+ hostParts[hostParts.length-1]; if (!allowedDomains.contains(domain)) { log.error("domain not allowed: {}", domain); HttpUtils.sendClientError(resp, "domain not allowed"); return; } final String cmd; if (SystemUtils.IS_OS_MAC_OSX) { cmd = "open"; } else if (SystemUtils.IS_OS_LINUX) { cmd = "gnome-open"; } else if (SystemUtils.IS_OS_WINDOWS) { cmd = "start"; } else { log.error("unsupported OS"); HttpUtils.sendClientError(resp, "unsupported OS"); return; } try { if (SystemUtils.IS_OS_WINDOWS) { // On Windows, we have to quote the url to allow for // e.g. ? and & characters in query string params. // To quote the url, we supply a dummy first argument, // since otherwise start treats the first argument as a // title for the new console window when it's quoted. LanternUtils.runCommand(cmd, "\"\"", "\""+url+"\""); } else { // on OS X and Linux, special characters in the url make // it through this call without our having to quote them. LanternUtils.runCommand(cmd, url); } } catch (IOException e) { log.error("open url failed"); HttpUtils.sendClientError(resp, "open url failed"); return; } return; } final Modal modal = this.model.getModal(); log.debug("processRequest: modal = {}, inter = {}, mode = {}", modal, inter, this.model.getSettings().getMode()); if (handleExceptionalInteractions(modal, inter, json)) { return; } Modal switchTo = null; try { // XXX a map would make this more robust switchTo = Modal.valueOf(interactionStr); } catch (IllegalArgumentException e) { } if (switchTo != null && switchModals.contains(switchTo)) { if (!switchTo.equals(modal)) { if (!switchModals.contains(modal)) { this.internalState.setLastModal(modal); } Events.syncModal(model, switchTo); } return; } switch (modal) { case welcome: this.model.getSettings().setMode(Mode.unknown); switch (inter) { case GET: log.debug("Setting get mode"); handleSetModeWelcome(Mode.get); break; case GIVE: log.debug("Setting give mode"); handleSetModeWelcome(Mode.give); break; } break; case authorize: 
log.debug("Processing authorize modal..."); this.internalState.setModalCompleted(Modal.authorize); this.internalState.advanceModal(null); break; case finished: this.internalState.setCompletedTo(Modal.finished); switch (inter) { case CONTINUE: log.debug("Processing continue"); this.model.setShowVis(true); Events.sync(SyncPath.SHOWVIS, true); this.internalState.setModalCompleted(Modal.finished); this.internalState.advanceModal(null); break; case SET: log.debug("Processing set in finished modal...applying JSON\n{}", json); applyJson(json); break; default: log.error("Did not handle interaction {} for modal {}", inter, modal); HttpUtils.sendClientError(resp, "Interaction not handled for modal: "+modal+ " and interaction: "+inter); break; } break; case firstInviteReceived: log.error("Processing invite received..."); break; case lanternFriends: this.internalState.setCompletedTo(Modal.lanternFriends); switch (inter) { case INVITE: invite(json); Events.sync(SyncPath.NOTIFICATIONS, model.getNotifications()); break; case CONTINUE: /* fall-through */ case CLOSE: log.debug("Processing continue/close for friends dialog"); this.internalState.setModalCompleted(Modal.lanternFriends); this.internalState.advanceModal(null); break; case ACCEPT: acceptInvite(json); Events.syncModal(model, Modal.lanternFriends); break; case DECLINE: declineInvite(json); Events.syncModal(model, Modal.lanternFriends); break; default: log.error("Did not handle interaction {} for modal {}", inter, modal); HttpUtils.sendClientError(resp, "Interaction not handled for modal: "+modal+ " and interaction: "+inter); break; } break; case none: break; case notInvited: switch (inter) { case RETRY: Events.syncModal(model, Modal.authorize); break; case REQUESTINVITE: Events.syncModal(model, Modal.requestInvite); break; default: log.error("Unexpected interaction: " + inter); break; } break; case proxiedSites: this.internalState.setCompletedTo(Modal.proxiedSites); switch (inter) { case CONTINUE: 
this.internalState.setModalCompleted(Modal.proxiedSites); this.internalState.advanceModal(null); break; case LANTERNFRIENDS: log.debug("Processing lanternFriends from proxiedSites"); Events.syncModal(model, Modal.lanternFriends); break; case SET: if (!model.getSettings().isSystemProxy()) { String msg = "Because you are using manual proxy " + "configuration, you may have to restart your " + "browser for your updated proxied sites list " + "to take effect."; model.addNotification(msg, MessageType.info, 30); Events.sync(SyncPath.NOTIFICATIONS, model.getNotifications()); } applyJson(json); break; case SETTINGS: log.debug("Processing settings from proxiedSites"); Events.syncModal(model, Modal.settings); break; default: log.error("Did not handle interaction {} for modal {}", inter, modal); HttpUtils.sendClientError(resp, "unexpected interaction for proxied sites"); break; } break; case requestInvite: log.info("Processing request invite"); switch (inter) { case CANCEL: this.internalState.setModalCompleted(Modal.requestInvite); this.internalState.advanceModal(Modal.notInvited); break; case CONTINUE: applyJson(json); this.internalState.setModalCompleted(Modal.proxiedSites); //TODO: need to do something here this.internalState.advanceModal(null); break; default: log.error("Did not handle interaction {} for modal {}", inter, modal); HttpUtils.sendClientError(resp, "unexpected interaction for request invite"); break; } break; case requestSent: log.debug("Process request sent"); break; case settings: switch (inter) { case GET: log.debug("Setting get mode"); // Only deal with a mode change if the mode has changed! 
if (modelService.getMode() == Mode.give) { // Break this out because it's set in the subsequent // setMode call final boolean everGet = model.isEverGetMode(); this.modelService.setMode(Mode.get); if (!everGet) { // need to do more setup to switch to get mode from // give mode model.setSetupComplete(false); model.setModal(Modal.proxiedSites); Events.syncModel(model); } else { // This primarily just triggers a setup complete event, // which triggers connecting to proxies, setting up // the local system proxy, etc. model.setSetupComplete(true); } } break; case GIVE: log.debug("Setting give mode"); this.modelService.setMode(Mode.give); break; case CLOSE: log.debug("Processing settings close"); Events.syncModal(model, Modal.none); break; case SET: log.debug("Processing set in setting...applying JSON\n{}", json); applyJson(json); break; case RESET: log.debug("Processing reset"); Events.syncModal(model, Modal.confirmReset); break; case PROXIEDSITES: log.debug("Processing proxied sites in settings"); Events.syncModal(model, Modal.proxiedSites); break; case LANTERNFRIENDS: log.debug("Processing friends in settings"); Events.syncModal(model, Modal.lanternFriends); break; default: log.error("Did not handle interaction {} for modal {}", inter, modal); HttpUtils.sendClientError(resp, "Interaction not handled for modal: "+modal+ " and interaction: "+inter); break; } break; case settingsLoadFailure: switch (inter) { case RETRY: modelIo.reload(); Events.sync(SyncPath.NOTIFICATIONS, model.getNotifications()); Events.syncModal(model, model.getModal()); break; case RESET: backupSettings(); Events.syncModal(model, Modal.welcome); break; default: log.error("Did not handle interaction {} for modal {}", inter, modal); break; } break; case systemProxy: this.internalState.setCompletedTo(Modal.systemProxy); switch (inter) { case CONTINUE: log.debug("Processing continue in systemProxy", json); applyJson(json); Events.sync(SyncPath.SYSTEMPROXY, model.getSettings().isSystemProxy()); 
this.internalState.setModalCompleted(Modal.systemProxy); this.internalState.advanceModal(null); break; default: log.error("Did not handle interaction {} for modal {}", inter, modal); HttpUtils.sendClientError(resp, "error setting system proxy pref"); break; } break; case updateAvailable: switch (inter) { case CLOSE: this.internalState.setModalCompleted(Modal.updateAvailable); this.internalState.advanceModal(null); break; default: log.error("Did not handle interaction {} for modal {}", inter, modal); break; } break; case authorizeLater: log.error("Did not handle interaction {} for modal {}", inter, modal); break; case confirmReset: log.debug("Handling confirm reset interaction"); switch (inter) { case CANCEL: log.debug("Processing cancel"); Events.syncModal(model, Modal.settings); break; case RESET: handleReset(); Events.syncModel(this.model); break; default: log.error("Did not handle interaction {} for modal {}", inter, modal); HttpUtils.sendClientError(resp, "Interaction not handled for modal: "+modal+ " and interaction: "+inter); } break; case about: switch (inter) { case CLOSE: Events.syncModal(model, this.internalState.getLastModal()); break; default: HttpUtils.sendClientError(resp, "invalid interaction "+inter); } break; case contact: switch(inter) { case CONTINUE: String msg; MessageType messageType; try { lanternFeedback.submit(json, this.model.getProfile().getEmail()); msg = "Thank you for contacting Lantern."; messageType = MessageType.info; } catch(Exception e) { log.error("Error submitting contact form: {}", e); msg = "Error sending message. 
Please check your "+ "connection and try again."; messageType = MessageType.error; } model.addNotification(msg, messageType, 30); Events.sync(SyncPath.NOTIFICATIONS, model.getNotifications()); // fall through because this should be done in both cases: case CANCEL: Events.syncModal(model, this.internalState.getLastModal()); break; default: HttpUtils.sendClientError(resp, "invalid interaction "+inter); } break; case giveModeForbidden: if (inter == Interaction.CONTINUE) { // need to do more setup to switch to get mode from give mode model.setSetupComplete(false); this.internalState.advanceModal(null); Events.syncModal(model, Modal.proxiedSites); Events.sync(SyncPath.SETUPCOMPLETE, false); } break; default: log.error("No matching modal for {}", modal); } this.modelIo.write(); } private void backupSettings() { try { File backup = new File(Desktop.getDesktopPath(), "lantern-model-backup"); FileUtils.copyFile(LanternClientConstants.DEFAULT_MODEL_FILE, backup); } catch (final IOException e) { log.warn("Could not backup model file."); } } private boolean handleExceptionalInteractions( final Modal modal, final Interaction inter, final String json) { boolean handled = false; Map<String, Object> map; Boolean notify; switch(inter) { case EXCEPTION: handleException(json); handled = true; break; case UNEXPECTEDSTATERESET: log.debug("Handling unexpected state reset."); backupSettings(); handleReset(); Events.syncModel(this.model); // fall through because this should be done in both cases: case UNEXPECTEDSTATEREFRESH: try { map = jsonToMap(json); } catch(Exception e) { log.error("Bad json payload in inter '{}': {}", inter, json); return true; } notify = (Boolean)map.get("notify"); if(notify) { try { lanternFeedback.submit((String)map.get("report"), this.model.getProfile().getEmail()); } catch(Exception e) { log.error("Could not submit unexpected state report: {}\n {}", e.getMessage(), (String)map.get("report")); } } handled = true; break; } return handled; } private void 
handleException(final String json) { StringBuilder logMessage = new StringBuilder(); Map<String, Object> map; try { map = jsonToMap(json); } catch(Exception e) { log.error("UI Exception (unable to parse json)"); return; } for(Map.Entry<String, Object> entry : map.entrySet()) { logMessage.append( String.format("\t%s: %s\n", entry.getKey(), entry.getValue() ) ); } log.error("UI Exception:\n {}", logMessage.toString()); } private Map<String, Object> jsonToMap(final String json) throws JsonParseException, JsonMappingException, IOException { final ObjectMapper om = new ObjectMapper(); Map<String, Object> map; map = om.readValue(json, Map.class); return map; } private boolean handleClose(String json) { if (StringUtils.isBlank(json)) { return false; } final ObjectMapper om = new ObjectMapper(); Map<String, Object> map; try { map = om.readValue(json, Map.class); final String notification = (String) map.get("notification"); model.closeNotification(Integer.parseInt(notification)); Events.sync(SyncPath.NOTIFICATIONS, model.getNotifications()); return true; } catch (JsonParseException e) { log.warn("Exception closing notifications {}", e); } catch (JsonMappingException e) { log.warn("Exception closing notifications {}", e); } catch (IOException e) { log.warn("Exception closing notifications {}", e); } return false; } private void declineInvite(final String json) { final String email = JsonUtils.getValueFromJson("email", json); this.xmppHandler.unsubscribed(email); } private void acceptInvite(final String json) { final String email = JsonUtils.getValueFromJson("email", json); this.xmppHandler.subscribed(email); // We also automatically subscribe to them in turn so we know about // their presence. 
this.xmppHandler.subscribe(email); } static class Invite { List<String> invite; public Invite() {} public List<String> getInvite() { return invite; } public void setInvite(List<String> invite) { this.invite = invite; } } private void invite(String json) { ObjectMapper om = new ObjectMapper(); try { if (json.length() == 0) { return;//nobody to invite } ArrayList<String> invites = om.readValue(json, ArrayList.class); inviteQueue.invite(invites); } catch (IOException e) { throw new RuntimeException(e); } } private void handleSetModeWelcome(final Mode mode) { this.model.setModal(Modal.authorize); this.internalState.setModalCompleted(Modal.welcome); this.modelService.setMode(mode); Events.syncModal(model); } private void applyJson(final String json) { final JsonModelModifier mod = new JsonModelModifier(modelService); mod.applyJson(json); } private void handleReset() { // This posts the reset event to any classes that need to take action, // avoiding coupling this class to those classes. Events.eventBus().post(new ResetEvent()); if (LanternClientConstants.DEFAULT_MODEL_FILE.isFile()) { try { FileUtils.forceDelete(LanternClientConstants.DEFAULT_MODEL_FILE); } catch (final IOException e) { log.warn("Could not delete model file?"); } } final Model base = new Model(model.getCountryService()); model.setLaunchd(base.isLaunchd()); model.setModal(base.getModal()); model.setNinvites(base.getNinvites()); model.setNodeId(base.getNodeId()); model.setProfile(base.getProfile()); model.setNproxiedSitesMax(base.getNproxiedSitesMax()); //we need to keep clientID and clientSecret, because they are application-level settings String clientID = model.getSettings().getClientID(); String clientSecret = model.getSettings().getClientSecret(); model.setSettings(base.getSettings()); model.getSettings().setClientID(clientID); model.getSettings().setClientSecret(clientSecret); model.setSetupComplete(base.isSetupComplete()); model.setShowVis(base.isShowVis()); model.clearNotifications(); 
modelIo.write(); } @Subscribe public void onLocationChanged(final LocationChangedEvent e) { Events.sync(SyncPath.LOCATION, e.getNewLocation()); if (censored.isCountryCodeCensored(e.getNewCountry())) { if (!censored.isCountryCodeCensored(e.getOldCountry())) { //moving from uncensored to censored if (model.getSettings().getMode() == Mode.give) { Events.syncModal(model, Modal.giveModeForbidden); } } } } @Subscribe public void onInvitesChanged(final InvitesChangedEvent e) { int newInvites = e.getNewInvites(); if (e.getOldInvites() == 0) { String invitation = newInvites == 1 ? "invitation" : "invitations"; String text = "You now have " + newInvites + " " + invitation; model.addNotification(text, MessageType.info); } else if (newInvites == 0 && e.getOldInvites() > 0) { model.addNotification("You have no more invitations. You will be notified when you receive more.", MessageType.important); } Events.sync(SyncPath.NOTIFICATIONS, model.getNotifications()); } @Subscribe public void onConnectivityChanged(final ConnectivityChangedEvent e) { Connectivity connectivity = model.getConnectivity(); if (!e.isConnected()) { connectivity.setInternet(false); Events.sync(SyncPath.CONNECTIVITY_INTERNET, false); return; } InetAddress ip = e.getNewIp(); connectivity.setIp(ip.getHostAddress()); connectivity.setInternet(true); Events.sync(SyncPath.CONNECTIVITY, model.getConnectivity()); Settings set = model.getSettings(); if (set.getMode() == null || set.getMode() == Mode.unknown) { if (censored.isCensored()) { set.setMode(Mode.get); } else { set.setMode(Mode.give); } } else if (set.getMode() == Mode.give && censored.isCensored()) { // want to set the mode to get now so that we don't mistakenly // proxy any more than necessary set.setMode(Mode.get); log.info("Disconnected; setting giveModeForbidden"); Events.syncModal(model, Modal.giveModeForbidden); } } }
package org.lantern.monitoring;

import java.lang.management.ManagementFactory;
import java.lang.management.MemoryMXBean;
import java.lang.management.OperatingSystemMXBean;
import java.lang.reflect.Method;
import java.util.Map;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

import org.apache.commons.lang3.StringUtils;
import org.lantern.Country;
import org.lantern.LanternConstants;
import org.lantern.LanternService;
import org.lantern.LanternUtils;
import org.lantern.event.Events;
import org.lantern.monitoring.Stats.Gauges;
import org.lantern.state.Mode;
import org.lantern.state.Model;
import org.lantern.state.SyncPath;
import org.lantern.util.Threads;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.google.inject.Inject;
import com.google.inject.Singleton;

/**
 * Periodically pulls global and per-country statistics from statshub into the
 * UI {@link Model}, and (when the user has enabled auto-reporting) pushes this
 * instance's own stats back to statshub.
 */
@Singleton
public class StatsManager implements LanternService {
    private static final Logger LOGGER = LoggerFactory
            .getLogger(StatsManager.class);

    // Pull global/country stats every minute (value is in SECONDS)
    private static final long GET_INTERVAL = 60;

    // Push our stats every 5 minutes (value is in MINUTES)
    private static final long POST_INTERVAL = 5;

    private final Model model;

    // NOTE(review): fallback proxies construct the API with a null address —
    // presumably StatshubAPI then uses its own default; confirm against
    // StatshubAPI's constructor contract.
    private final StatshubAPI statshub = new StatshubAPI(
            LanternUtils.isFallbackProxy() ? null
                    : LanternConstants.LANTERN_LOCALHOST_ADDR);

    private final MemoryMXBean memoryMXBean = ManagementFactory
            .getMemoryMXBean();
    private final OperatingSystemMXBean osStats = ManagementFactory
            .getOperatingSystemMXBean();

    private final ScheduledExecutorService getScheduler = Threads
            .newSingleThreadScheduledExecutor("StatsManager-Get");
    private final ScheduledExecutorService postScheduler = Threads
            .newSingleThreadScheduledExecutor("StatsManager-Post");

    @Inject
    public StatsManager(Model model) {
        this.model = model;
    }

    @Override
    public void start() {
        getScheduler.scheduleAtFixedRate(
                getStats,
                30,
                GET_INTERVAL,
                TimeUnit.SECONDS);

        postScheduler.scheduleAtFixedRate(
                postStats,
                1, // wait 1 minute before first posting stats, to give the
                   // system a chance to initialize metadata
                POST_INTERVAL,
                TimeUnit.MINUTES);
    }

    @Override
    public void stop() {
        getScheduler.shutdownNow();
        postScheduler.shutdownNow();
        try {
            getScheduler.awaitTermination(30, TimeUnit.SECONDS);
            postScheduler.awaitTermination(30, TimeUnit.SECONDS);
        } catch (InterruptedException ie) {
            LOGGER.warn("Unable to await termination of schedulers", ie);
        }
    }

    /**
     * Fetches the "country" dimension from statshub and syncs the global and
     * per-country stats to the model/UI. Errors are logged, never propagated,
     * so the scheduled task keeps running.
     */
    private final Runnable getStats = new Runnable() {
        public void run() {
            try {
                StatsResponse resp = statshub.getStats("country");
                if (resp != null) {
                    Map<String, Stats> countryDim = resp.getDims().get(
                            "country");
                    if (countryDim != null) {
                        model.setGlobalStats(countryDim.get("total"));
                        for (Country country : model.getCountries().values()) {
                            country.setStats(countryDim.get(
                                    country.getCode().toLowerCase()));
                        }
                        Events.sync(SyncPath.GLOBAL_STATS,
                                model.getGlobalStats());
                        Events.sync(SyncPath.COUNTRIES, model.getCountries());
                    }
                }
            } catch (Exception e) {
                LOGGER.warn("Unable to getStats: " + e.getMessage(), e);
            }
        }
    };

    /**
     * Posts this instance's stats (plus JVM/system gauges) and, when a user
     * guid is known, the per-user stats. Only runs when auto-reporting is
     * enabled in settings.
     */
    private final Runnable postStats = new Runnable() {
        public void run() {
            // Only report stats if user enabled auto-reporting
            if (model.getSettings().isAutoReport()) {
                try {
                    String userGuid = model.getUserGuid();
                    String countryCode = model.getLocation().getCountry();
                    if (StringUtils.isBlank(countryCode)
                            || "--".equals(countryCode)) {
                        // "xx" is the statshub convention for unknown country
                        countryCode = "xx";
                    }
                    String instanceId = model.getInstanceId();

                    Stats instanceStats =
                            model.getInstanceStats().toInstanceStats();
                    addSystemStats(instanceStats);
                    statshub.postInstanceStats(
                            instanceId,
                            userGuid,
                            countryCode,
                            LanternUtils.isFallbackProxy(),
                            instanceStats);

                    if (userGuid != null) {
                        Stats userStats = model.getInstanceStats()
                                .toUserStats(
                                        userGuid,
                                        Mode.give == model.getSettings()
                                                .getMode(),
                                        Mode.get == model.getSettings()
                                                .getMode());
                        statshub.postUserStats(userGuid, countryCode,
                                userStats);
                    }
                } catch (Exception e) {
                    LOGGER.warn("Unable to postStats: " + e.getMessage(), e);
                }
            }
        }
    };

    /**
     * Adds JVM/OS gauges (CPU usage, load average, committed memory, open
     * file descriptors) to the given stats.
     */
    private void addSystemStats(Stats stats) {
        stats.setGauge(Gauges.processCPUUsage,
                scalePercent(getSystemStat("getProcessCpuLoad")));
        stats.setGauge(Gauges.systemCPUUsage,
                scalePercent(getSystemStat("getSystemCpuLoad")));
        stats.setGauge(Gauges.systemLoadAverage,
                scalePercent(osStats.getSystemLoadAverage()));
        stats.setGauge(Gauges.memoryUsage,
                memoryMXBean.getHeapMemoryUsage().getCommitted()
                        + memoryMXBean.getNonHeapMemoryUsage().getCommitted());
        // BUGFIX: the previous (Long) cast threw ClassCastException on
        // non-Unix platforms, where getSystemStat() falls back to a Double
        // 0.0. Number.longValue() handles both the Long returned by
        // getOpenFileDescriptorCount and the Double fallback.
        stats.setGauge(Gauges.openFileDescriptors,
                getSystemStat("getOpenFileDescriptorCount").longValue());
    }

    /**
     * Scales a fractional value (e.g. 0.37) to a whole-number percentage
     * (37). Returns null for null input.
     */
    private Long scalePercent(Number value) {
        if (value == null) {
            return null;
        }
        // doubleValue() instead of a (Double) cast so integral Numbers can't
        // throw ClassCastException.
        return (long) (value.doubleValue() * 100.0);
    }

    /**
     * Reflectively reads a numeric stat from the platform OS MXBean (the
     * getters used here are Sun/Oracle extensions, hence reflection).
     * Returns 0.0 on non-Unix platforms or when the getter is unavailable.
     */
    private Number getSystemStat(final String name) {
        if (!isOnUnix()) {
            return Double.valueOf(0.0);
        }
        try {
            final Method method = osStats.getClass().getDeclaredMethod(name);
            method.setAccessible(true);
            return (Number) method.invoke(osStats);
        } catch (final Exception e) {
            LOGGER.debug("Unable to get system stat: {}", name, e);
            return Double.valueOf(0.0);
        }
    }

    // NOTE(review): matches the historical Sun JDK implementation class name;
    // newer JDKs may use a different impl class, in which case this reports
    // false and the gauges above fall back to 0.0 — confirm on target JDKs.
    private boolean isOnUnix() {
        return osStats.getClass().getName()
                .equals("com.sun.management.UnixOperatingSystem");
    }
}
package org.lightmare.remote.rcp;

import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;

import org.jboss.netty.channel.ChannelFuture;
import org.jboss.netty.channel.ChannelFutureListener;
import org.jboss.netty.channel.ChannelHandlerContext;
import org.jboss.netty.channel.ExceptionEvent;
import org.jboss.netty.channel.MessageEvent;
import org.jboss.netty.channel.SimpleChannelHandler;
import org.lightmare.remote.rcp.wrappers.RcpWrapper;

/**
 * Handler @see {@link SimpleChannelHandler} for RPC response
 *
 * @author levan
 *
 */
public class RcpHandler extends SimpleChannelHandler {

    // Hands responses from the Netty I/O thread to the caller blocked in
    // getWrapper()
    private final BlockingQueue<RcpWrapper> answer;

    public RcpHandler() {
	answer = new LinkedBlockingQueue<RcpWrapper>();
    }

    /**
     * Closes the channel and, once the close completes, queues the received
     * {@link RcpWrapper} for the thread waiting in {@link #getWrapper()}.
     */
    @Override
    public void messageReceived(ChannelHandlerContext ctx,
	    final MessageEvent ev) {

	ev.getFuture().getChannel().close().awaitUninterruptibly()
		.addListener(new ChannelFutureListener() {

		    public void operationComplete(ChannelFuture future)
			    throws Exception {
			// LinkedBlockingQueue is unbounded, so offer()
			// cannot fail here; the assert documents that.
			boolean offered = answer.offer((RcpWrapper) ev
				.getMessage());
			assert offered;
		    }
		});
    }

    @Override
    public void exceptionCaught(ChannelHandlerContext ctx, ExceptionEvent ev) {
	// NOTE(review): no logger field in this class, hence the raw stack
	// trace; consider routing through the project's logging instead.
	ev.getCause().printStackTrace();
	ev.getChannel().close().awaitUninterruptibly();
    }

    /**
     * Blocks until an RPC response is available. If the calling thread is
     * interrupted while waiting, keeps waiting but restores the thread's
     * interrupt status before returning.
     *
     * BUGFIX: the previous version inverted the flag — it started as TRUE
     * (self-interrupting the thread on every normal return) and was set to
     * FALSE when an InterruptedException was actually caught (so genuine
     * interrupts were silently dropped).
     *
     * @return the next {@link RcpWrapper} taken from the queue
     */
    public RcpWrapper getWrapper() {

	boolean interrupted = false;
	try {
	    for (;;) {
		try {
		    return answer.take();
		} catch (InterruptedException ex) {
		    // remember the interrupt and retry the take
		    interrupted = true;
		}
	    }
	} finally {
	    if (interrupted) {
		Thread.currentThread().interrupt();
	    }
	}
    }
}
package es.blueberrypancak.chesskurwa;

import java.awt.AWTException;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.Rectangle;
import java.awt.Robot;
import java.awt.Toolkit;
import java.awt.image.BufferedImage;
import java.io.IOException;
import java.util.Stack;

/**
 * Reads a chess board off the screen via {@link Robot} screen captures,
 * reconstructs a FEN string from the pixel contents of each tile, and tracks
 * turn, castling rights and en-passant state across successive captures.
 */
public class BoardReader {

    private Robot robot;
    // Top-left pixel of the detected board (-1,-1 until findBoard succeeds)
    private int boardX, boardY, nBlackMoves;
    private boolean wQCastle, wKCastle, bQCastle, bKCastle;
    private static BufferedImage img;
    // History of piece-placement strings, used to detect moves/en passant
    private Stack<String> fenStack;
    private String FEN, enPassant;
    private boolean myTurn;

    public BoardReader() throws IOException, AWTException {
        robot = new Robot();
        fenStack = new Stack<String>();
        FEN = "";
        enPassant = "-";
        nBlackMoves = 1;
        boardX = boardY = -1;
        wQCastle = wKCastle = bQCastle = bKCastle = true;
        findBoard();
        myTurn = isWhite();
        buildFEN();
    }

    public int getX() {
        return this.boardX;
    }

    public int getY() {
        return this.boardY;
    }

    public String getFEN() {
        return this.FEN;
    }

    /** A fully built FEN has at least the placement and side-to-move fields. */
    public boolean isFENBuilt() {
        return this.FEN.split(" ").length > 1;
    }

    /**
     * Scans the fixed screen region of the turn indicator for the
     * configured red highlight.
     */
    public boolean isMyTurn() {
        for (int y = boardY + 561; y <= boardY + 561 + 13; y++) {
            for (int x = boardX + 508; x <= boardX + 508 + 44; x++) {
                if (match(x, y, Config.RED)) {
                    return true;
                }
            }
        }
        return false;
    }

    /** Re-captures the full screen and returns its dimensions. */
    private Dimension refresh() {
        Dimension screenSize = Toolkit.getDefaultToolkit().getScreenSize();
        img = robot.createScreenCapture(
                new Rectangle(0, 0, screenSize.width, screenSize.height));
        return screenSize;
    }

    /**
     * Locates the board by searching for a 2x2 pixel corner signature
     * (three light-gray pixels and one dark-gray). Leaves boardX/boardY at
     * -1 if not found.
     */
    private void findBoard() {
        Dimension d = refresh();
        for (int x = 0; x < d.width; x++) {
            for (int y = 0; y < d.height; y++) {
                if (match(x, y, Config.LIGHT_GRAY)
                        && match(x + 1, y, Config.LIGHT_GRAY)
                        && match(x, y + 1, Config.LIGHT_GRAY)
                        && match(x + 1, y + 1, Config.DARK_GRAY)) {
                    boardX = x + 2;
                    boardY = y + 2;
                    return;
                }
            }
        }
    }

    /** True if no piece-colored (black/white) pixel appears in the tile. */
    private boolean tileEmpty(int x, int y) {
        for (int a = 0; a <= Config.TILE_WIDTH; a++) {
            for (int b = 0; b <= Config.TILE_WIDTH; b++) {
                if (match(x + b, y + a, Config.BLACK)
                        || match(x + b, y + a, Config.WHITE)) {
                    return false;
                }
            }
        }
        return true;
    }

    /**
     * Counts black pixels in a tile; this count is the fingerprint used by
     * Piece.getType() to identify the piece.
     */
    private int getTile(int x, int y) {
        int n = 0;
        for (int a = 0; a <= Config.TILE_WIDTH; a++) {
            for (int b = 0; b <= Config.TILE_WIDTH; b++) {
                if (match(x + a, y + b, Config.BLACK)) {
                    n += 1;
                }
            }
        }
        return n;
    }

    /**
     * Rebuilds the FEN from a fresh screen capture.
     *
     * @return true if a move was detected since the previous capture
     * @throws IOException propagated from Stockfish interaction
     */
    public boolean buildFEN() throws IOException {
        refresh();
        String tempFEN = FEN;
        FEN = "";
        boolean b = false;
        // Iterate the 8x8 grid top-to-bottom from White's point of view;
        // when playing Black the board is flipped, so walk it backwards.
        int k = isWhite() ? 1 : -1;
        int d = isWhite() ? 0 : (Config.WIDTH - Config.TILE_WIDTH);
        for (int y = boardY + d;
                isWhite() ? y < boardY + Config.WIDTH : y >= boardY;
                y += k * Config.TILE_WIDTH) {
            for (int x = boardX + d;
                    isWhite() ? x < boardX + Config.WIDTH : x >= boardX;
                    x += k * Config.TILE_WIDTH) {
                if (tileEmpty(x, y)) {
                    FEN += '1';
                    continue;
                }
                Piece p = Piece.getType(getTile(x, y));
                if (p == null) {
                    // Unrecognized tile (e.g. mid-animation): keep old FEN
                    FEN = tempFEN;
                    return false;
                }
                FEN += p.getName();
            }
            FEN += '/';
        }
        FEN = FEN.substring(0, FEN.length() - 1);
        if (fenStack.size() >= 1) {
            // Only record a new position when it differs from the last one
            if (!fenStack.get(fenStack.size() - 1).equals(FEN)) {
                fenStack.push(FEN);
            }
        } else if (fenStack.size() == 0) {
            System.out.println("CLEARED");
            fenStack.push(FEN);
            Stockfish.run("rnbqkbnr/pppppppp/8/8/8/8/PPPPPPPP/RNBQKBNR w KQkq - 0 1");
        }
        if (hasMoved()) {
            b = true;
            if (!myTurn)
                nBlackMoves += 1;
            myTurn = !myTurn;
            checkEnPassant();
        }
        // Castling rights persist only while king and rook sit on their
        // original squares (rank index 7 = White's back rank, 0 = Black's).
        if (wQCastle)
            wQCastle = FEN.split("/")[7].indexOf('K') == 4
                    && FEN.split("/")[7].indexOf('R') == 0;
        if (wKCastle)
            wKCastle = FEN.split("/")[7].indexOf('K') == 4
                    && FEN.split("/")[7].charAt(7) == 'R';
        if (bQCastle)
            bQCastle = FEN.split("/")[0].indexOf('k') == 4
                    && FEN.split("/")[0].indexOf('r') == 0;
        if (bKCastle)
            bKCastle = FEN.split("/")[0].indexOf('k') == 4
                    && FEN.split("/")[0].charAt(7) == 'r';
        // Collapse runs of '1' into digit counts, longest runs first.
        // BUGFIX: the loop header was garbled in the original source
        // ("for(int i = 8; i >= 2; i String s ...") and did not compile;
        // restored the intended decrement.
        for (int i = 8; i >= 2; i--) {
            String s = "";
            for (int x = 0; x < i; x++)
                s += "1";
            FEN = FEN.replaceAll(s, i + "");
        }
        FEN += ' ';
        FEN += getTurn();
        FEN += ' ';
        if (wKCastle)
            FEN += "K";
        if (wQCastle)
            FEN += "Q";
        if (bKCastle)
            FEN += "k";
        if (bQCastle)
            FEN += "q";
        if (!(bQCastle || bKCastle || wKCastle || wQCastle))
            FEN += '-';
        FEN += ' ';
        FEN += enPassant;
        FEN += " 0 ";
        FEN += nBlackMoves;
        return b;
    }

    /**
     * True if the FEN's side-to-move field matches our own color.
     * BUGFIX: guard was "length < 1" while the code reads chunks[1] — an
     * ArrayIndexOutOfBoundsException for a one-field FEN; now requires at
     * least two fields.
     */
    public boolean fenMatchesSide() {
        String[] chunks = FEN.split(" ");
        if (chunks.length < 2)
            return false;
        return chunks[1].equals(isWhite() ? "w" : "b");
    }

    /** A move happened iff the last two recorded positions differ. */
    private boolean hasMoved() {
        return fenStack.size() > 1
                ? !fenStack.get(fenStack.size() - 1)
                        .equals(fenStack.get(fenStack.size() - 2))
                : false;
    }

    /**
     * Detects a double pawn push by comparing pawn ranks between the last
     * two positions and records the en-passant target square (or "-").
     * Note: pops both positions but pushes back only the current one, so the
     * stack keeps at most the latest position after this call.
     */
    private void checkEnPassant() {
        if (fenStack.size() > 1) {
            String r = "";
            String currentFEN = fenStack.pop();
            String lastFEN = fenStack.pop();
            String mB = currentFEN.split("/")[3];
            String pB = currentFEN.split("/")[1];
            String oB = lastFEN.split("/")[1];
            String mW = currentFEN.split("/")[4];
            String pW = currentFEN.split("/")[6];
            String oW = lastFEN.split("/")[6];
            for (int i = 0; i < 8; i++) {
                if (pW.charAt(i) == '1' && mW.charAt(i) == 'P'
                        && oW.charAt(i) == 'P')
                    r = (char) (97 + i) + "3";
                if (pB.charAt(i) == '1' && mB.charAt(i) == 'p'
                        && oB.charAt(i) == 'p')
                    r = (char) (97 + i) + "6";
            }
            fenStack.push(currentFEN);
            // BUGFIX: was "r != \"\"" — a reference comparison that is false
            // only for the interned literal; compare content instead.
            enPassant = !r.isEmpty() ? r : "-";
        }
    }

    private char getTurn() {
        return isWhite() ? isMyTurn() ? 'w' : 'b' : isMyTurn() ? 'b' : 'w';
    }

    /**
     * Determines our color from a 3-pixel signature at a fixed offset below
     * the board (two candidate x positions to tolerate 1px jitter).
     */
    public boolean isWhite() {
        return match(boardX + 7, boardY + 487, Config.S1)
                && match(boardX + 8, boardY + 487, Config.S2)
                && match(boardX + 9, boardY + 487, Config.S3)
                || match(boardX + 8, boardY + 487, Config.S1)
                        && match(boardX + 9, boardY + 487, Config.S2)
                        && match(boardX + 10, boardY + 487, Config.S3);
    }

    /** Exact RGB match of the captured pixel against the given color. */
    private static boolean match(int x, int y, Color c) {
        return img.getRGB(x, y) == c.getRGB();
    }

    public String toString() {
        return isWhite() + ":bestmove:" + Stockfish.getBestMove() + ":x:"
                + boardX + ",y:" + boardY + ",FEN:" + FEN;
    }
}
package org.joda.time; // Import for @link support import org.joda.time.convert.ConverterManager; /** * Defines an instant in time that can be queried and modified using datetime fields. * <p> * The implementation of this interface will be mutable. * It may provide more advanced methods than those in the interface. * <p> * Methods in your application should be defined using <code>ReadWritableDateTime</code> * as a parameter if the method wants to manipulate and change a date in simple ways. * * @author Stephen Colebourne * @author Brian S O'Neill */ public interface ReadWritableDateTime extends ReadableDateTime, ReadWritableInstant { void setYear(int year); void addYears(int years); void setWeekyear(int weekyear); void addWeekyears(int weekyears); void setMonthOfYear(int monthOfYear); void addMonths(int months); void setWeekOfWeekyear(int weekOfWeekyear); void addWeeks(int weeks); void setDayOfYear(int dayOfYear); void setDayOfMonth(int dayOfMonth); void setDayOfWeek(int dayOfWeek); void addDays(int days); void setHourOfDay(int hourOfDay); void addHours(int hours); void setMinuteOfDay(int minuteOfDay); void setMinuteOfHour(int minuteOfHour); void addMinutes(int minutes); void setSecondOfDay(int secondOfDay); void setSecondOfMinute(int secondOfMinute); void addSeconds(int seconds); void setMillisOfDay(int millisOfDay); void setMillisOfSecond(int millisOfSecond); void addMillis(int millis); void setDate(long instant); void setDate(Object instant); void setDate(int year, int monthOfYear, int dayOfMonth); void setTime(long millis); void setTime(Object instant); void setTime(int hour, int minuteOfHour, int secondOfMinute, int millisOfSecond); void setDateTime(long instant); void setDateTime(Object instant); void setDateTime( int year, int monthOfYear, int dayOfMonth, int hourOfDay, int minuteOfHour, int secondOfMinute, int millisOfSecond); }
package org.lightmare.utils;

import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.MalformedURLException;
import java.net.URISyntaxException;
import java.net.URL;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.zip.ZipFile;

import org.lightmare.jpa.datasource.FileParsers;
import org.lightmare.scannotation.AnnotationDB;
import org.lightmare.utils.earfile.DirUtils;
import org.lightmare.utils.earfile.ExtUtils;
import org.lightmare.utils.earfile.JarUtils;
import org.lightmare.utils.earfile.SimplUtils;
import org.lightmare.utils.fs.FileType;
import org.lightmare.utils.fs.FileUtils;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;

/**
 * Utility class for checking jar, ear and zip files or ear and jar directories
 * from application server deployments (jboss) read contents and etc.
 *
 * @author levan
 *
 */
public abstract class AbstractIOUtils {

    // persistence.xml URL keyed by owning archive URL
    protected Map<URL, URL> xmlURLs;

    // persistence.xml URL keyed by jar file name
    protected Map<String, URL> xmlFiles;

    protected List<URL> libURLs;

    protected List<URL> ejbURLs;

    protected String path;

    protected File realFile;

    protected ZipFile earFile;

    // Temporary files registered for deletion at JVM exit
    protected List<File> tmpFiles;

    protected boolean isDirectory;

    protected boolean xmlFromJar;

    // Set to TRUE once scan(...) has completed
    protected boolean executed;

    // Cached file types and extensions
    public static final String JAR = "jar";

    public static final String JAR_FILE_EXT = ".jar";

    // BUGFIX: these were copy-pasted as "jar"/".jar", which made the
    // FileType.JAR branch in getType(File) unreachable (both endsWith
    // checks tested ".jar"). Corrected to the ear extension.
    public static final String EAR = "ear";

    public static final String EAR_FILE_EXT = ".ear";

    public static final String CLASS_FILE_EXT = ".class";

    public static final String LIB = "lib";

    public static final String LIB_WITH_DELIM = "lib/";

    public static final String PERSISTENCE_XML = "persistence.xml";

    public static final String APPLICATION_XML_PATH = "META-INF/application.xml";

    public static final char ARCHIVE_URL_DELIM = '!';

    // Application descriptor XML file tags
    public static final String EJB_TAG_NAME = "ejb";

    public AbstractIOUtils(String path) {
	this.path = path;
	realFile = new File(path);
	isDirectory = realFile.isDirectory();
    }

    public AbstractIOUtils(File file) {
	this.path = file.getPath();
	realFile = file;
	isDirectory = realFile.isDirectory();
    }

    public AbstractIOUtils(URL url) throws IOException {
	this.path = url.toString();
	try {
	    realFile = new File(url.toURI());
	} catch (URISyntaxException ex) {
	    throw new IOException(ex);
	}
	isDirectory = realFile.isDirectory();
    }

    public abstract FileType getType();

    /**
     * Ensures that all temporary files will be removed at finish of program
     *
     * @param file
     */
    protected void ensureTmpFile(File file) {
	file.deleteOnExit();
    }

    public boolean isExecuted() {
	return executed;
    }

    public boolean notExecuted() {
	return ObjectUtils.notTrue(executed);
    }

    public void setXmlFromJar(boolean xmlFromJar) {
	this.xmlFromJar = xmlFromJar;
    }

    // Lazily initialized accessors for the URL/file caches

    public Map<URL, URL> getXmlURLs() {
	if (xmlURLs == null) {
	    xmlURLs = new HashMap<URL, URL>();
	}
	return xmlURLs;
    }

    public Map<String, URL> getXmlFiles() {
	if (xmlFiles == null) {
	    xmlFiles = new HashMap<String, URL>();
	}
	return xmlFiles;
    }

    public List<URL> getLibURLs() {
	if (libURLs == null) {
	    libURLs = new ArrayList<URL>();
	}
	return libURLs;
    }

    public List<URL> getEjbURLs() {
	if (ejbURLs == null) {
	    ejbURLs = new ArrayList<URL>();
	}
	return ejbURLs;
    }

    public ZipFile getEarFile() throws IOException {
	if (earFile == null) {
	    earFile = new ZipFile(path);
	}
	return earFile;
    }

    /**
     * Derives the {@link FileType} of a deployment artifact from its path
     * and whether it is a directory.
     */
    private static FileType getType(File appFile) {

	FileType fileType;
	String appPath = appFile.getPath();
	if (appFile.isDirectory() && appPath.endsWith(EAR_FILE_EXT)) {
	    fileType = FileType.EDIR;
	} else if (appPath.endsWith(EAR_FILE_EXT)) {
	    fileType = FileType.EAR;
	} else if (appPath.endsWith(JAR_FILE_EXT)) {
	    fileType = FileType.JAR;
	} else {
	    boolean isEarDir = FileUtils.checkOnEarDir(appFile);
	    if (isEarDir) {
		fileType = FileType.EDIR;
	    } else {
		fileType = FileType.DIR;
	    }
	}

	return fileType;
    }

    /**
     * Instantiates the {@link AbstractIOUtils} subtype appropriate for the
     * given URL; the file type is detected when fileType is null.
     */
    public static AbstractIOUtils getAppropriatedType(URL url,
	    FileType fileType) throws IOException {

	AbstractIOUtils ioUtils = null;
	File appFile;
	try {
	    appFile = new File(url.toURI());
	} catch (URISyntaxException ex) {
	    throw new IOException(ex);
	}
	FileType typToCheck = fileType;
	if (fileType == null) {
	    typToCheck = getType(appFile);
	}
	if (typToCheck.equals(FileType.EDIR)) {
	    ioUtils = new DirUtils(appFile);
	} else if (typToCheck.equals(FileType.EAR)) {
	    ioUtils = new ExtUtils(appFile);
	} else if (typToCheck.equals(FileType.JAR)) {
	    ioUtils = new JarUtils(appFile);
	} else if (typToCheck.equals(FileType.DIR)) {
	    ioUtils = new SimplUtils(appFile);
	}

	return ioUtils;
    }

    public static AbstractIOUtils getAppropriatedType(URL url)
	    throws IOException {

	AbstractIOUtils ioUtils = getAppropriatedType(url, null);

	return ioUtils;
    }

    /**
     * Finds persistence.xml {@link URL} by class name
     *
     * @param classOwnersFiles
     * @param className
     * @return {@link URL}
     */
    public URL getAppropriatedURL(Map<String, String> classOwnersFiles,
	    String className) {

	String jarName = classOwnersFiles.get(className);
	URL xmlURL;
	if (jarName == null || jarName.isEmpty()) {
	    xmlURL = null;
	} else {
	    xmlURL = getXmlFiles().get(jarName);
	}

	return xmlURL;
    }

    /**
     * Finds persistence.xml {@link URL} by class name
     *
     * @param annotationDB
     * @param className
     * @return {@link URL}
     */
    public URL getAppropriatedURL(AnnotationDB annotationDB, String className) {

	Map<String, String> classOwnersFiles = annotationDB
		.getClassOwnersFiles();
	URL xmlURL = getAppropriatedURL(classOwnersFiles, className);

	return xmlURL;
    }

    /**
     * Parses an application.xml stream and collects the text of all
     * &lt;ejb&gt; elements; always closes the stream.
     *
     * @param xmlStream
     * @return set of ejb module names
     * @throws IOException
     */
    public Set<String> appXmlParser(InputStream xmlStream) throws IOException {

	try {
	    Document document = FileParsers.parse(xmlStream);
	    NodeList nodeList = document.getElementsByTagName(EJB_TAG_NAME);
	    Set<String> ejbNames = new HashSet<String>();
	    String ejbName;
	    for (int i = 0; i < nodeList.getLength(); i++) {
		Element ejbElement = (Element) nodeList.item(i);
		ejbName = FileParsers.getContext(ejbElement);
		if (ObjectUtils.notNull(ejbName)) {
		    ejbNames.add(ejbName);
		}
	    }

	    return ejbNames;
	} finally {
	    ObjectUtils.close(xmlStream);
	}
    }

    public Set<String> appXmlParser() throws IOException {

	InputStream stream = earReader();
	Set<String> jarNames = appXmlParser(stream);

	return jarNames;
    }

    public abstract InputStream earReader() throws IOException;

    public void readEntries() throws IOException {

	InputStream xmlStream = earReader();
	Set<String> jarNames = appXmlParser(xmlStream);
	extractEjbJars(jarNames);
    }

    /**
     * Gets {@link URL}s in {@link List} for ejb library files from ear
     * {@link File}
     *
     * @throws IOException
     */
    public abstract void getEjbLibs() throws IOException;

    public abstract void extractEjbJars(Set<String> jarNames)
	    throws IOException;

    public abstract boolean checkOnOrm(String jarName) throws IOException;

    /**
     * Scans project directory for class or jar files and persistence.xml (uses
     * for development process)
     *
     * @param files
     * @throws MalformedURLException
     */
    public void scanDirectory(File... files) throws MalformedURLException {

	File parentFile;
	if (ObjectUtils.available(files)) {
	    parentFile = ObjectUtils.getFirst(files);
	} else {
	    parentFile = realFile;
	}
	File[] subFiles = parentFile.listFiles();
	// BUGFIX: listFiles() returns null for non-directories or on I/O
	// error; the original dereferenced it unconditionally (NPE).
	if (subFiles == null) {
	    return;
	}
	String fileName;
	URL fileURL;
	for (File subFile : subFiles) {
	    fileName = subFile.getName();
	    if (subFile.isDirectory()) {
		scanDirectory(subFile);
	    } else if (fileName.endsWith(JAR_FILE_EXT)
		    || fileName.endsWith(CLASS_FILE_EXT)) {
		fileURL = subFile.toURI().toURL();
		getEjbURLs().add(fileURL);
		getLibURLs().add(fileURL);
	    } else if (fileName.equals(PERSISTENCE_XML)) {
		fileURL = subFile.toURI().toURL();
		getXmlURLs().put(realFile.toURI().toURL(), fileURL);
	    }
	}
    }

    protected abstract void scanArchive(Object... args) throws IOException;

    public void scan(Object... args) throws IOException {

	scanArchive(args);
	executed = Boolean.TRUE;
    }

    public URL[] getLibs() {

	URL[] urls;
	if (libURLs == null) {
	    urls = null;
	} else {
	    urls = ObjectUtils.toArray(libURLs, URL.class);
	}

	return urls;
    }

    public URL[] getEjbs() {

	URL[] urls;
	if (ejbURLs == null) {
	    urls = null;
	} else {
	    urls = ObjectUtils.toArray(ejbURLs, URL.class);
	}

	return urls;
    }

    /** Concatenation of ejb and lib URLs (never null). */
    public URL[] getURLs() {

	List<URL> fullURLs = new ArrayList<URL>();
	URL[] urls;
	if (ObjectUtils.notNull(ejbURLs)) {
	    fullURLs.addAll(ejbURLs);
	}
	if (ObjectUtils.notNull(libURLs)) {
	    fullURLs.addAll(libURLs);
	}
	urls = ObjectUtils.toArray(fullURLs, URL.class);

	return urls;
    }

    protected List<File> getForAddTmpFiles() {

	if (tmpFiles == null) {
	    tmpFiles = new ArrayList<File>();
	}

	return tmpFiles;
    }

    /**
     * Saves temporary files at cache
     *
     * @param tmpFile
     */
    protected void addTmpFile(File tmpFile) {

	ensureTmpFile(tmpFile);
	getForAddTmpFiles().add(tmpFile);
    }

    public List<File> getTmpFiles() {

	return tmpFiles;
    }
}
package org.voltdb.sysprocs;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Deque;
import java.util.LinkedList;
import java.util.ArrayDeque;
import java.util.HashSet;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.Semaphore;

import org.apache.log4j.Logger;
import org.voltdb.BackendTarget;
import org.voltdb.DependencyPair;
import org.voltdb.ExecutionSite;
import org.voltdb.ExecutionSite.SnapshotTableTask;
import org.voltdb.ExecutionSite.SystemProcedureExecutionContext;
import org.voltdb.HsqlBackend;
import org.voltdb.ParameterSet;
import org.voltdb.ProcInfo;
import org.voltdb.VoltDB;
import org.voltdb.VoltSystemProcedure;
import org.voltdb.VoltTable;
import org.voltdb.VoltTable.ColumnInfo;
import org.voltdb.VoltType;
import org.voltdb.SnapshotDataTarget;
import org.voltdb.DefaultSnapshotDataTarget;
import org.voltdb.catalog.CatalogMap;
import org.voltdb.catalog.Cluster;
import org.voltdb.catalog.Procedure;
import org.voltdb.catalog.Site;
import org.voltdb.catalog.Table;
import org.voltdb.catalog.Partition;
import org.voltdb.catalog.Host;
import org.voltdb.dtxn.DtxnConstants;
import org.voltdb.utils.CatalogUtil;
import org.voltdb.utils.VoltLoggerFactory;
import org.voltdb.sysprocs.saverestore.SnapshotDigestUtil;

/**
 * System procedure that saves (snapshots) the database tables to disk.
 * Runs in two phases: a feasibility check on every node (can the target
 * files be created?) followed by the creation and distribution of snapshot
 * targets to the local execution sites.
 */
@ProcInfo(singlePartition = false)
public class SnapshotSave extends VoltSystemProcedure
{
    private static final Logger TRACE_LOG =
        Logger.getLogger(SnapshotSave.class.getName(), VoltLoggerFactory.instance());

    private static final Logger HOST_LOG =
        Logger.getLogger("HOST", VoltLoggerFactory.instance());

    // Dependency IDs for the two fragment/aggregation pairs.
    private static final int DEP_saveTest = (int)
        SysProcFragmentId.PF_saveTest | DtxnConstants.MULTINODE_DEPENDENCY;
    private static final int DEP_saveTestResults = (int)
        SysProcFragmentId.PF_saveTestResults;
    private static final int DEP_createSnapshotTargets = (int)
        SysProcFragmentId.PF_createSnapshotTargets | DtxnConstants.MULTIPARTITION_DEPENDENCY;
    private static final int DEP_createSnapshotTargetsResults = (int)
        SysProcFragmentId.PF_createSnapshotTargetsResults;

    /**
     * Ensure the first thread to run the fragment does the creation
     * of the targets and the distribution of the work.
     */
    private static final Semaphore m_snapshotCreateSetupPermit = new Semaphore(1);

    /**
     * Only proceed once permits are available after setup completes
     */
    private static Semaphore m_snapshotPermits = new Semaphore(0);

    // Per-site task queues filled by the setup thread; guarded by its own monitor.
    private static final LinkedList<Deque<SnapshotTableTask>> m_taskListsForSites =
        new LinkedList<Deque<SnapshotTableTask>>();

    @Override
    public void init(ExecutionSite site, Procedure catProc,
                     BackendTarget eeType, HsqlBackend hsql, Cluster cluster)
    {
        super.init(site, catProc, eeType, hsql, cluster);
        site.registerPlanFragment(SysProcFragmentId.PF_saveTest, this);
        site.registerPlanFragment(SysProcFragmentId.PF_saveTestResults, this);
        site.registerPlanFragment(SysProcFragmentId.PF_createSnapshotTargets, this);
        site.registerPlanFragment(SysProcFragmentId.PF_createSnapshotTargetsResults, this);
    }

    @Override
    public DependencyPair executePlanFragment(HashMap<Integer, List<VoltTable>> dependencies,
                                              long fragmentId,
                                              ParameterSet params,
                                              SystemProcedureExecutionContext context)
    {
        if (fragmentId == SysProcFragmentId.PF_saveTest)
        {
            // Phase 1 (per node): verify every target file can be created.
            assert(params.toArray()[0] != null);
            assert(params.toArray()[1] != null);
            String file_path = (String) params.toArray()[0];
            String file_nonce = (String) params.toArray()[1];
            TRACE_LOG.trace("Checking feasibility of save with path and nonce: "
                            + file_path + ", " + file_nonce);
            VoltTable result = constructNodeResultsTable();
            // -1 means no snapshot in progress; anything else is a concurrent snapshot.
            if (ExecutionSite.ExecutionSitesCurrentlySnapshotting.get() != -1) {
                result.addRow(context.getSite().getHost().getTypeName(),
                              "", "FAILURE", "SNAPSHOT IN PROGRESS");
                return new DependencyPair(DEP_saveTest, result);
            }
            for (Table table : getTablesToSave(context))
            {
                File saveFilePath =
                    constructFileForTable(table, file_path, file_nonce,
                                          context.getSite().getHost().getTypeName());
                TRACE_LOG.trace("Host ID " + context.getSite().getHost().getTypeName()
                                + " table: " + table.getTypeName()
                                + " to path: " + saveFilePath);
                String file_valid = "SUCCESS";
                String err_msg = "";
                if (saveFilePath.exists()) {
                    file_valid = "FAILURE";
                    err_msg = "SAVE FILE ALREADY EXISTS: " + saveFilePath;
                } else if (!saveFilePath.getParentFile().canWrite()) {
                    file_valid = "FAILURE";
                    err_msg = "FILE LOCATION UNWRITABLE: " + saveFilePath;
                } else {
                    try {
                        // Create eagerly so a later target creation can't race another writer.
                        saveFilePath.createNewFile();
                    } catch (IOException ex) {
                        file_valid = "FAILURE";
                        // FIX: added missing space before "RESULTED"
                        err_msg = "FILE CREATION OF " + saveFilePath
                                  + " RESULTED IN IOException: " + ex.getMessage();
                    }
                }
                result.addRow(context.getSite().getHost().getTypeName(),
                              table.getTypeName(), file_valid, err_msg);
            }
            return new DependencyPair(DEP_saveTest, result);
        }
        else if (fragmentId == SysProcFragmentId.PF_saveTestResults)
        {
            // Aggregate the per-node feasibility tables into one.
            TRACE_LOG.trace("Aggregating save feasibility results");
            assert (dependencies.size() > 0);
            List<VoltTable> dep = dependencies.get(DEP_saveTest);
            VoltTable result = constructNodeResultsTable();
            for (VoltTable table : dep)
            {
                while (table.advanceRow())
                {
                    // this will add the active row of table
                    result.add(table);
                }
            }
            return new DependencyPair(DEP_saveTestResults, result);
        }
        else if (fragmentId == SysProcFragmentId.PF_createSnapshotTargets)
        {
            // Phase 2: one thread does setup (creates data targets, distributes
            // work); every thread then grabs a permit and its task list.
            TRACE_LOG.trace("Creating snapshot target and handing to EEs");
            assert(params.toArray()[0] != null);
            assert(params.toArray()[1] != null);
            assert(params.toArray()[2] != null);
            assert(params.toArray()[3] != null);
            final String file_path = (String) params.toArray()[0];
            final String file_nonce = (String) params.toArray()[1];
            byte block = (Byte) params.toArray()[3];
            final VoltTable result = constructNodeResultsTable();
            // Only the first thread to arrive wins the setup permit.
            boolean willDoSetup = m_snapshotCreateSetupPermit.tryAcquire();
            final int numLocalSites = VoltDB.instance().getLocalSites().values().size();
            if (willDoSetup)
            {
                try {
                    assert(ExecutionSite.ExecutionSitesCurrentlySnapshotting.get() == -1);
                    final long startTime = (Long) params.toArray()[2];
                    final ArrayDeque<SnapshotTableTask> partitionedSnapshotTasks =
                        new ArrayDeque<SnapshotTableTask>();
                    final ArrayList<SnapshotTableTask> replicatedSnapshotTasks =
                        new ArrayList<SnapshotTableTask>();
                    final ArrayList<String> tableNames = new ArrayList<String>();
                    for (final Table table : getTablesToSave(context)) {
                        tableNames.add(table.getTypeName());
                    }
                    SnapshotDigestUtil.recordSnapshotTableList(file_path, file_nonce, tableNames);
                    final AtomicInteger numTables = new AtomicInteger(tableNames.size());
                    final SnapshotRegistry.Snapshot snapshotRecord =
                        SnapshotRegistry.startSnapshot(startTime, file_path, file_nonce,
                                                       tableNames.toArray(new String[0]));
                    for (final Table table : getTablesToSave(context))
                    {
                        String canSnapshot = "SUCCESS";
                        String err_msg = "";
                        final File saveFilePath =
                            constructFileForTable(table, file_path, file_nonce,
                                                  context.getSite().getHost().getTypeName());
                        try {
                            final SnapshotDataTarget sdt =
                                constructSnapshotDataTargetForTable(context, saveFilePath,
                                                                    table,
                                                                    context.getSite().getHost(),
                                                                    context.getCluster().getPartitions().size(),
                                                                    startTime);
                            // On close, record per-table stats; the last table to
                            // finish closes out the whole snapshot in the registry.
                            final Runnable onClose = new Runnable() {
                                @Override
                                public void run() {
                                    final long now = System.currentTimeMillis();
                                    snapshotRecord.updateTable(table.getTypeName(),
                                        new SnapshotRegistry.Snapshot.TableUpdater() {
                                            @Override
                                            public SnapshotRegistry.Snapshot.Table update(
                                                    SnapshotRegistry.Snapshot.Table registryTable) {
                                                return snapshotRecord.new Table(
                                                        registryTable,
                                                        sdt.getBytesWritten(),
                                                        now,
                                                        sdt.getLastWriteException());
                                            }
                                        });
                                    int tablesLeft = numTables.decrementAndGet();
                                    if (tablesLeft == 0) {
                                        final SnapshotRegistry.Snapshot completed =
                                            SnapshotRegistry.finishSnapshot(snapshotRecord);
                                        final double duration =
                                            (completed.timeFinished - completed.timeStarted) / 1000.0;
                                        HOST_LOG.info("Snapshot " + snapshotRecord.nonce
                                                      + " finished at " + completed.timeFinished
                                                      + " and took " + duration + " seconds ");
                                    }
                                }
                            };
                            sdt.setOnCloseHandler(onClose);
                            final SnapshotTableTask task =
                                new SnapshotTableTask(table.getRelativeIndex(),
                                                      sdt,
                                                      table.getIsreplicated(),
                                                      table.getTypeName());
                            if (table.getIsreplicated()) {
                                replicatedSnapshotTasks.add(task);
                            } else {
                                partitionedSnapshotTasks.offer(task);
                            }
                        } catch (IOException ex) {
                            canSnapshot = "FAILURE";
                            // FIX: added missing space before "RESULTED"
                            err_msg = "SNAPSHOT INITIATION OF " + saveFilePath
                                      + " RESULTED IN IOException: " + ex.getMessage();
                        }
                        result.addRow(context.getSite().getHost().getTypeName(),
                                      table.getTypeName(), canSnapshot, err_msg);
                    }
                    synchronized (m_taskListsForSites) {
                        if (!partitionedSnapshotTasks.isEmpty() || !replicatedSnapshotTasks.isEmpty()) {
                            ExecutionSite.ExecutionSitesCurrentlySnapshotting.set(
                                    VoltDB.instance().getLocalSites().values().size());
                        } else {
                            // Nothing to snapshot; drop the registry record.
                            SnapshotRegistry.discardSnapshot(snapshotRecord);
                        }
                        /**
                         * Distribute the writing of replicated tables to exactly one partition.
                         */
                        for (int ii = 0; ii < numLocalSites && !partitionedSnapshotTasks.isEmpty(); ii++) {
                            m_taskListsForSites.add(
                                new ArrayDeque<SnapshotTableTask>(partitionedSnapshotTasks));
                        }
                        int siteIndex = 0;
                        for (SnapshotTableTask t : replicatedSnapshotTasks) {
                            m_taskListsForSites.get(siteIndex++ % numLocalSites).offer(t);
                        }
                    }
                } catch (Exception ex) {
                    // FIX: added missing space before "RESULTED"
                    result.addRow(context.getSite().getHost().getTypeName(),
                                  "", "FAILURE",
                                  "SNAPSHOT INITIATION OF " + file_path + file_nonce
                                  + " RESULTED IN Exception: " + ex.getMessage());
                    HOST_LOG.error(ex);
                } finally {
                    // Wake all local sites (including this one) even on failure.
                    m_snapshotPermits.release(numLocalSites);
                }
            }
            try {
                m_snapshotPermits.acquire();
            } catch (Exception e) {
                result.addRow(context.getSite().getHost().getTypeName(),
                              "", "FAILURE", e.toString());
                return new DependencyPair(DEP_createSnapshotTargets, result);
            } finally {
                /*
                 * The last thread to acquire a snapshot permit has to be the one
                 * to release the setup permit to ensure that a thread
                 * doesn't come late and think it is supposed to do the setup work
                 */
                synchronized (m_snapshotPermits) {
                    if (m_snapshotPermits.availablePermits() == 0
                        && m_snapshotCreateSetupPermit.availablePermits() == 0) {
                        m_snapshotCreateSetupPermit.release();
                    }
                }
            }
            synchronized (m_taskListsForSites) {
                final Deque<SnapshotTableTask> m_taskList = m_taskListsForSites.poll();
                if (m_taskList == null) {
                    // No work for this site.
                    return new DependencyPair(DEP_createSnapshotTargets, result);
                } else {
                    if (m_taskListsForSites.isEmpty()) {
                        assert(m_snapshotCreateSetupPermit.availablePermits() == 1);
                        assert(m_snapshotPermits.availablePermits() == 0);
                    }
                    assert(ExecutionSite.ExecutionSitesCurrentlySnapshotting.get() > 0);
                    context.getExecutionSite().initiateSnapshots(m_taskList);
                }
            }
            if (block != 0) {
                // Blocking mode: wait for the local snapshot work to complete.
                HashSet<Exception> failures = null;
                String status = "SUCCESS";
                String err = "";
                try {
                    failures = context.getExecutionSite().completeSnapshotWork();
                } catch (InterruptedException e) {
                    status = "FAILURE";
                    err = e.toString();
                    // Restore the interrupt flag for callers up the stack.
                    Thread.currentThread().interrupt();
                }
                final VoltTable blockingResult = constructPartitionResultsTable();
                // FIX: failures is null when completeSnapshotWork() was interrupted;
                // the original called failures.isEmpty() and threw NPE here.
                if (failures == null || failures.isEmpty()) {
                    blockingResult.addRow(context.getSite().getHost().getTypeName(),
                                          context.getSite().getTypeName(),
                                          status, err);
                } else {
                    status = "FAILURE";
                    for (Exception e : failures) {
                        err = e.toString();
                    }
                    blockingResult.addRow(context.getSite().getHost().getTypeName(),
                                          context.getSite().getTypeName(),
                                          status, err);
                }
                return new DependencyPair(DEP_createSnapshotTargets, blockingResult);
            }
            return new DependencyPair(DEP_createSnapshotTargets, result);
        }
        else if (fragmentId == SysProcFragmentId.PF_createSnapshotTargetsResults)
        {
            TRACE_LOG.trace("Aggregating create snapshot target results");
            assert (dependencies.size() > 0);
            List<VoltTable> dep = dependencies.get(DEP_createSnapshotTargets);
            VoltTable result = constructNodeResultsTable();
            for (VoltTable table : dep)
            {
                while (table.advanceRow())
                {
                    // this will add the active row of table
                    result.add(table);
                }
            }
            return new DependencyPair(DEP_createSnapshotTargetsResults, result);
        }
        assert (false);
        return null;
    }

    /**
     * Entry point: validates path/nonce, runs the feasibility check on every
     * node, and if that passes kicks off snapshot-target creation.
     *
     * @param path  directory the snapshot files are written to
     * @param nonce unique snapshot identifier; must not contain '-' or ','
     * @param block non-zero to wait for local snapshot work to finish
     */
    public VoltTable[] run(String path, String nonce, long block)
            throws VoltAbortException
    {
        final long startTime = System.currentTimeMillis();
        HOST_LOG.info("Saving database to path: " + path + ", ID: " + nonce
                      + " at " + System.currentTimeMillis());
        if (path == null || path.equals("")) {
            return constructErrorResults("Provided path was null or the empty string");
        }
        if (nonce == null || nonce.equals("")) {
            return constructErrorResults("Provided nonce was null or the empty string");
        }
        if (nonce.contains("-") || nonce.contains(",")) {
            // '-' and ',' are used as separators in snapshot file names.
            // (typo "prohitibited" fixed)
            return constructErrorResults("Provided nonce " + nonce
                                         + " contains a prohibited character (- or ,)");
        }
        // See if we think the save will succeed
        VoltTable[] results;
        results = performSaveFeasibilityWork(path, nonce);
        // Test feasibility results for fail
        while (results[0].advanceRow())
        {
            if (results[0].getString(2).equals("FAILURE"))
            {
                // Something lost, bomb out and just return the whole
                // table of results to the client for analysis
                return results;
            }
        }
        results = performSnapshotCreationWork(path, nonce, startTime, (byte) block);
        final long finishTime = System.currentTimeMillis();
        final long duration = finishTime - startTime;
        HOST_LOG.info("Snapshot initiation took " + duration + " milliseconds");
        return results;
    }

    /** Builds a one-row, one-column ERR_MSG result set (replaces triplicated inline code). */
    private final VoltTable[] constructErrorResults(String errMsg)
    {
        ColumnInfo[] result_columns =
            new ColumnInfo[] { new ColumnInfo("ERR_MSG", VoltType.STRING) };
        VoltTable results[] = new VoltTable[] { new VoltTable(result_columns) };
        results[0].addRow(errMsg);
        return results;
    }

    // XXX this could maybe move to be a method on
    // SystemProcedureExecutionContext?
    /** Every non-materialized table in the catalog is saved. */
    private final List<Table> getTablesToSave(SystemProcedureExecutionContext context)
    {
        CatalogMap<Table> all_tables = context.getDatabase().getTables();
        ArrayList<Table> my_tables = new ArrayList<Table>();
        for (Table table : all_tables)
        {
            // We're responsible for saving any table that isn't replicated and
            // all the replicated tables if we're the lowest site ID on our host
            // Also, we ignore all materialized tables as those should get
            // regenerated when we restore
            // NOTE: this assumes that all partitioned tables have partitions on
            // all execution sites.
            if (table.getMaterializer() == null)
            {
                my_tables.add(table);
            }
        }
        return my_tables;
    }

    /** Builds "<nonce>-<table>[-host_<id>].vpt" under filePath. */
    private final File constructFileForTable(Table table, String filePath,
                                             String fileNonce, String hostId)
    {
        StringBuilder filename_builder = new StringBuilder(fileNonce);
        filename_builder.append("-");
        filename_builder.append(table.getTypeName());
        if (!table.getIsreplicated())
        {
            filename_builder.append("-host_");
            filename_builder.append(hostId);
        }
        filename_builder.append(".vpt"); // Volt partitioned table
        return new File(filePath, filename_builder.toString());
    }

    private final SnapshotDataTarget constructSnapshotDataTargetForTable(
            SystemProcedureExecutionContext context,
            File f, Table table, Host h, int numPartitions, long createTime)
            throws IOException
    {
        return new DefaultSnapshotDataTarget(f,
                                             Integer.parseInt(h.getTypeName()),
                                             context.getCluster().getTypeName(),
                                             context.getDatabase().getTypeName(),
                                             table.getTypeName(),
                                             numPartitions,
                                             table.getIsreplicated(),
                                             getPartitionsOnHost(context, h),
                                             CatalogUtil.getVoltTable(table),
                                             createTime);
    }

    private final VoltTable constructNodeResultsTable()
    {
        return new VoltTable(nodeResultsColumns);
    }

    public static final ColumnInfo nodeResultsColumns[] = new ColumnInfo[] {
        new ColumnInfo("HOST_ID", VoltType.STRING),
        new ColumnInfo("TABLE", VoltType.STRING),
        new ColumnInfo("RESULT", VoltType.STRING),
        new ColumnInfo("ERR_MSG", VoltType.STRING)
    };

    public static final ColumnInfo partitionResultsColumns[] = new ColumnInfo[] {
        new ColumnInfo("HOST_ID", VoltType.STRING),
        new ColumnInfo("SITE_ID", VoltType.STRING),
        new ColumnInfo("RESULT", VoltType.STRING),
        new ColumnInfo("ERR_MSG", VoltType.STRING)
    };

    private final VoltTable constructPartitionResultsTable()
    {
        return new VoltTable(partitionResultsColumns);
    }

    /** Runs the PF_saveTest fragment everywhere and aggregates the results. */
    private final VoltTable[] performSaveFeasibilityWork(String filePath,
                                                         String fileNonce)
    {
        SynthesizedPlanFragment[] pfs = new SynthesizedPlanFragment[2];

        // This fragment causes each execution site to confirm the likely
        // success of writing tables to disk
        pfs[0] = new SynthesizedPlanFragment();
        pfs[0].fragmentId = SysProcFragmentId.PF_saveTest;
        pfs[0].outputDepId = DEP_saveTest;
        pfs[0].inputDepIds = new int[] {};
        pfs[0].multipartition = false;
        pfs[0].nonExecSites = true;
        ParameterSet params = new ParameterSet();
        params.setParameters(filePath, fileNonce);
        pfs[0].parameters = params;

        // This fragment aggregates the save-to-disk sanity check results
        pfs[1] = new SynthesizedPlanFragment();
        pfs[1].fragmentId = SysProcFragmentId.PF_saveTestResults;
        pfs[1].outputDepId = DEP_saveTestResults;
        pfs[1].inputDepIds = new int[] { DEP_saveTest };
        pfs[1].multipartition = false;
        pfs[1].nonExecSites = false;
        pfs[1].parameters = new ParameterSet();

        VoltTable[] results;
        results = executeSysProcPlanFragments(pfs, DEP_saveTestResults);
        return results;
    }

    /** Runs the PF_createSnapshotTargets fragment everywhere and aggregates. */
    private final VoltTable[] performSnapshotCreationWork(String filePath,
                                                          String fileNonce,
                                                          long startTime,
                                                          byte block)
    {
        SynthesizedPlanFragment[] pfs = new SynthesizedPlanFragment[2];

        // This fragment causes each execution site to confirm the likely
        // success of writing tables to disk
        pfs[0] = new SynthesizedPlanFragment();
        pfs[0].fragmentId = SysProcFragmentId.PF_createSnapshotTargets;
        pfs[0].outputDepId = DEP_createSnapshotTargets;
        pfs[0].inputDepIds = new int[] {};
        pfs[0].multipartition = true;
        pfs[0].nonExecSites = false;
        ParameterSet params = new ParameterSet();
        params.setParameters(filePath, fileNonce, startTime, block);
        pfs[0].parameters = params;

        // This fragment aggregates the save-to-disk sanity check results
        pfs[1] = new SynthesizedPlanFragment();
        pfs[1].fragmentId = SysProcFragmentId.PF_createSnapshotTargetsResults;
        pfs[1].outputDepId = DEP_createSnapshotTargetsResults;
        pfs[1].inputDepIds = new int[] { DEP_createSnapshotTargets };
        pfs[1].multipartition = false;
        pfs[1].nonExecSites = false;
        pfs[1].parameters = new ParameterSet();

        VoltTable[] results;
        results = executeSysProcPlanFragments(pfs, DEP_createSnapshotTargetsResults);
        return results;
    }

    /** Partition IDs hosted by h, gathered by scanning the cluster's sites. */
    private int[] getPartitionsOnHost(SystemProcedureExecutionContext c, Host h)
    {
        final ArrayList<Partition> results = new ArrayList<Partition>();
        for (final Site s : c.getCluster().getSites()) {
            if (s.getHost().getTypeName().equals(h.getTypeName())) {
                if (s.getPartition() != null) {
                    results.add(s.getPartition());
                }
            }
        }
        final int retval[] = new int[results.size()];
        int ii = 0;
        for (final Partition p : results) {
            retval[ii++] = Integer.parseInt(p.getTypeName());
        }
        return retval;
    }
}
package kg.apc.jmeter.perfmon.agent;

import java.io.IOException;
import java.net.ServerSocket;

import kg.apc.jmeter.perfmon.AgentConnector;

/**
 * The main Agent class which starts the socket server and listen to
 * incoming connections. It is a console application, so we will use
 * System.out to print messages.
 * @author Stephane Hoblingre
 */
public class ServerAgent implements Runnable {

    /**
     * The version of the Agent
     */
    private static final String version = "1.3";

    /**
     * The default port
     */
    public static int DEFAULT_PORT = 4444;

    private int port = -1;

    /**
     * listening loop controller. Volatile because stopService() may be called
     * from a different thread than the one running the accept loop; without it
     * the loop might never observe the flag change.
     */
    private volatile boolean listening = true;

    /**
     * Constructor
     * @param port the port to run the agent
     */
    public ServerAgent(int port) {
        this.port = port;
    }

    /**
     * One simple method to log message
     * @param message
     */
    public static void logMessage(String message) {
        System.out.println(message);
    }

    /**
     * stop the service
     */
    public void stopService() {
        listening = false;
    }

    /**
     * For Unit tests only
     */
    public void startServiceAsThread() {
        Thread t = new Thread(this);
        t.start();
    }

    /**
     * Starts the socket server and blocks, accepting connections until
     * {@link #stopService()} is called. Exits the JVM if the port cannot
     * be bound.
     */
    public void startService() {
        listening = true;
        ServerSocket serverSocket = null;
        // Prime the metrics subsystem with a first CPU read
        // (NOTE(review): presumably initializes internal counters - confirm).
        MetricsGetter.getInstance().getValues(AgentConnector.CPU);
        try {
            serverSocket = new ServerSocket(port);
        } catch (IOException e) {
            logMessage("Could not listen on port: " + port + ". Please specify another port...");
            System.exit(-1);
        }

        logMessage("Waiting for incoming connections...");

        // For now, to stop the agent we must end the process (ctrl+c, kill, etc.)
        while (listening) {
            try {
                // One thread per client connection.
                new ConnectionThread(serverSocket.accept()).start();
            } catch (IOException e) {
                logMessage("Impossible to create the connection with the client. Error is:");
                logMessage(e.getMessage());
            }
        }

        try {
            serverSocket.close();
        } catch (IOException e) {
            // do nothing... we are shutting down anyway
        }
    }

    /**
     * Misspelled historical name kept so existing callers keep compiling.
     * @deprecated use {@link #startService()} instead
     */
    public void startServie() {
        startService();
    }

    /**
     * The main method to start the agent
     * @param args [optional] the port on which the agent will start. 4444 is used if nothing is specified.
     */
    public static void main(String[] args) {
        ServerAgent.logMessage("JMeterPlugins Agent version " + version);
        int port = ServerAgent.DEFAULT_PORT;

        if (args.length > 0) {
            try {
                // Integer.parseInt replaces the roundabout valueOf().intValue()
                port = Integer.parseInt(args[0]);
                ServerAgent.logMessage("The Agent will use port: " + port);
            } catch (Exception e) {
                ServerAgent.logMessage("No valid port specified, the default value is used: " + port);
            }
        } else {
            logMessage("No port specified, the default value is used: " + port);
        }

        ServerAgent agent = new ServerAgent(port);
        agent.startService();
    }

    //Need to remove annotation for Java 1.4 compilation
    //@Override
    public void run() {
        startService();
    }
}
package org.lightmare.utils;

import java.lang.reflect.Array;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 * Utility class to work with {@link Collection} instances
 *
 * @author Levan
 * @since 0.0.81-SNAPSHOT
 */
public abstract class CollectionUtils {

    // First index of array
    public static final int FIRST_INDEX = 0;

    // Second index of array
    public static final int SECOND_INDEX = 1;

    // Index of not existing data in collection
    public static final int NOT_EXISTING_INDEX = -1;

    // Length of empty array
    public static final int EMPTY_ARRAY_LENGTH = 0;

    // Empty array of objects
    public static final Object[] EMPTY_ARRAY = {};

    /**
     * Checks if passed {@link Collection} instance is not empty. Note: unlike
     * {@link #valid(Collection)} this does NOT null-check its argument.
     *
     * @param collection
     * @return <code>boolean</code>
     */
    public static boolean notEmpty(Collection<?> collection) {
	return !collection.isEmpty();
    }

    /**
     * Checks passed {@link Collection} instance on null and on emptiness
     * returns true if it is not null and is not empty
     *
     * @param collection
     * @return <code>boolean</code>
     */
    public static boolean valid(Collection<?> collection) {
	return collection != null && !collection.isEmpty();
    }

    /**
     * Checks passed {@link Map} instance on null and emptiness returns true if
     * it is not null and is not empty
     *
     * @param map
     * @return <code>boolean</code>
     */
    public static boolean valid(Map<?, ?> map) {
	return map != null && !map.isEmpty();
    }

    /**
     * Checks if passed {@link Map} instance is null or is empty
     *
     * @param map
     * @return <code>boolean</code>
     */
    public static boolean invalid(Map<?, ?> map) {
	return !valid(map);
    }

    /**
     * Checks if passed {@link Collection} instance is null or is empty
     *
     * @param collection
     * @return <code>boolean</code>
     */
    public static boolean invalid(Collection<?> collection) {
	return !valid(collection);
    }

    /**
     * Checks if there is null or empty {@link Collection} instance in passed
     * collections
     *
     * @param collections
     * @return <code>boolean</code>
     */
    public static boolean invalidAll(Collection<?>... collections) {
	// FIX: the original returned !valid(collections), which only tested
	// that the varargs ARRAY was non-null/non-empty and ignored the
	// contents — contradicting this method's documented contract.
	return !validAll(collections);
    }

    /**
     * Checks if each of passed {@link Map} instances is not null and not empty
     *
     * @param maps
     * @return <code>boolean</code>
     */
    public static boolean validAll(Map<?, ?>... maps) {

	boolean available = ObjectUtils.notNull(maps);
	if (available) {
	    Map<?, ?> map;
	    // stop scanning as soon as one invalid map is found
	    for (int i = FIRST_INDEX; i < maps.length && available; i++) {
		map = maps[i];
		available = available && valid(map);
	    }
	}

	return available;
    }

    /**
     * Checks if passed array is not null and is not empty
     *
     * @param array
     * @return <code>boolean</code>
     */
    public static boolean valid(Object[] array) {
	return array != null && array.length > EMPTY_ARRAY_LENGTH;
    }

    /**
     * Checks if passed array is null or is empty
     *
     * @param array
     * @return <code>boolean</code>
     */
    public static boolean invalid(Object[] array) {
	return !valid(array);
    }

    /**
     * Checks if each of passed {@link Collection} instances is not null and
     * not empty
     *
     * @param collections
     * @return <code>boolean</code>
     */
    public static boolean validAll(Collection<?>... collections) {

	boolean available = ObjectUtils.notNull(collections);
	if (available) {
	    Collection<?> collection;
	    for (int i = FIRST_INDEX; i < collections.length && available; i++) {
		collection = collections[i];
		available = available && valid(collection);
	    }
	}

	return available;
    }

    /**
     * Checks if each of passed arrays is not null and not empty
     *
     * @param arrays
     * @return <code>boolean</code>
     */
    public static boolean validAll(Object[]... arrays) {

	boolean available = ObjectUtils.notNull(arrays);
	if (available) {
	    Object[] array;
	    int length = arrays.length;
	    for (int i = FIRST_INDEX; i < length && available; i++) {
		array = arrays[i];
		available = available && valid(array);
	    }
	}

	return available;
    }

    /**
     * Gets value from passed {@link Map} as other {@link Map} instance
     *
     * @param key
     * @param from
     * @return {@link Map}<K,V> or <code>null</code> if absent / not a map
     */
    public static <K, V> Map<K, V> getAsMap(Object key, Map<?, ?> from) {

	Map<K, V> result;
	if (valid(from)) {
	    Object objectValue = from.get(key);
	    if (objectValue instanceof Map) {
		result = ObjectUtils.cast(objectValue);
	    } else {
		result = null;
	    }
	} else {
	    result = null;
	}

	return result;
    }

    /**
     * Gets values from passed {@link Map} as other {@link Map} instance
     * recursively by passed keys array
     *
     * @param from
     * @param keys
     * @return {@link Map} or <code>null</code> if any key is missing
     */
    public static Map<?, ?> getAsMap(Map<?, ?> from, Object... keys) {

	Map<?, ?> result = from;
	int length = keys.length;
	Object key;
	for (int i = FIRST_INDEX; i < length && ObjectUtils.notNull(result); i++) {
	    key = keys[i];
	    result = getAsMap(key, result);
	}

	return result;
    }

    /**
     * Gets values from passed {@link Map} as other {@link Map} instance
     * recursively by all but the last key, then reads the last key from the
     * innermost {@link Map}
     *
     * @param from
     * @param keys
     * @return <code>V</code> or <code>null</code> if keys is empty or any
     *         level is missing
     */
    public static <V> V getSubValue(Map<?, ?> from, Object... keys) {

	V value;
	// FIX: an empty keys array used to produce new Object[-1] and throw
	// NegativeArraySizeException; treat it as "nothing found" instead.
	if (keys.length == EMPTY_ARRAY_LENGTH) {
	    value = null;
	} else {
	    int length = keys.length - 1;
	    Object[] subKeys = new Object[length];
	    Object key = keys[length];
	    System.arraycopy(keys, FIRST_INDEX, subKeys, FIRST_INDEX, length);
	    Map<?, ?> result = getAsMap(from, subKeys);
	    if (valid(result)) {
		value = ObjectUtils.cast(result.get(key));
	    } else {
		value = null;
	    }
	}

	return value;
    }

    /**
     * Puts passed value to passed {@link Map} instance on passed key if such
     * is not contained
     *
     * @param map
     * @param key
     * @param value
     */
    public static <K, V> void putIfAbscent(Map<K, V> map, K key, V value) {

	boolean contained = map.containsKey(key);
	if (ObjectUtils.notTrue(contained)) {
	    map.put(key, value);
	}
    }

    /**
     * Creates new {@link Set} from passed {@link Collection} instance
     *
     * @param collection
     * @return {@link Set}<code><T></code> (empty set for null/empty input)
     */
    public static <T> Set<T> translateToSet(Collection<T> collection) {

	Set<T> set;
	if (valid(collection)) {
	    set = new HashSet<T>(collection);
	} else {
	    set = Collections.emptySet();
	}

	return set;
    }

    /**
     * Creates new {@link Set} from passed array instance
     *
     * @param array
     * @return {@link Set}<code><T></code> (empty set for null/empty input)
     */
    public static <T> Set<T> translateToSet(T[] array) {

	List<T> collection;
	if (valid(array)) {
	    collection = Arrays.asList(array);
	} else {
	    collection = null;
	}

	return translateToSet(collection);
    }

    /**
     * Creates new {@link List} from passed {@link Collection} instance
     *
     * @param collection
     * @return {@link List}<code><T></code> (empty list for null/empty input)
     */
    public static <T> List<T> translateToList(Collection<T> collection) {

	List<T> list;
	if (valid(collection)) {
	    list = new ArrayList<T>(collection);
	} else {
	    list = Collections.emptyList();
	}

	return list;
    }

    // Reflectively allocates a T[] of the requested size.
    private static <T> T[] toArray(Class<T> type, int size) {

	Object arrayObject = Array.newInstance(type, size);
	T[] array = ObjectUtils.cast(arrayObject);

	return array;
    }

    /**
     * Checks if passed {@link Object} is array
     *
     * @param data
     * @return <code>boolean</code>
     */
    public static boolean isArray(final Object data) {

	boolean valid = (data instanceof Object[] || data instanceof boolean[]
		|| data instanceof byte[] || data instanceof short[]
		|| data instanceof char[] || data instanceof int[]
		|| data instanceof long[] || data instanceof float[]
		|| data instanceof double[]);

	return valid;
    }

    /**
     * Checks if passed {@link Object} is {@link Object} types array
     *
     * @param data
     * @return <code>boolean</code>
     */
    public static boolean isObjectArray(final Object data) {
	boolean valid = (data instanceof Object[]);
	return valid;
    }

    /**
     * Checks if passed {@link Object} is primitive types array
     *
     * @param data
     * @return <code>boolean</code>
     */
    public static boolean isPrimitiveArray(final Object data) {

	boolean valid = (data instanceof boolean[] || data instanceof byte[]
		|| data instanceof short[] || data instanceof char[]
		|| data instanceof int[] || data instanceof long[]
		|| data instanceof float[] || data instanceof double[]);

	return valid;
    }

    /**
     * Converts passed {@link Collection} to array of appropriated {@link Class}
     * type
     *
     * @param collection
     * @param type
     * @return <code>T[]</code> or <code>null</code> for null collection
     */
    public static <T> T[] toArray(Collection<T> collection, Class<T> type) {

	T[] array;
	if (ObjectUtils.notNull(collection)) {
	    array = toArray(type, collection.size());
	    array = collection.toArray(array);
	} else {
	    array = null;
	}

	return array;
    }

    /**
     * Creates empty array of passed type
     *
     * @param type
     * @return <code>T[]</code>
     */
    public static <T> T[] emptyArray(Class<T> type) {
	T[] empty = toArray(type, EMPTY_ARRAY_LENGTH);
	return empty;
    }

    /**
     * Peaks first element from list
     *
     * @param list
     * @return T or <code>null</code> for null/empty list
     */
    private static <T> T getFirstFromList(List<T> list) {

	T value;
	if (valid(list)) {
	    value = list.get(FIRST_INDEX);
	} else {
	    value = null;
	}

	return value;
    }

    /**
     * Peaks first element from collection
     *
     * @param collection
     * @return T or <code>null</code> for null/empty collection
     */
    public static <T> T getFirst(Collection<T> collection) {

	T value;
	if (valid(collection)) {
	    if (collection instanceof List) {
		// lists allow direct index access
		value = getFirstFromList(((List<T>) collection));
	    } else {
		Iterator<T> iterator = collection.iterator();
		value = iterator.next();
	    }
	} else {
	    value = null;
	}

	return value;
    }

    /**
     * Peaks first element from array
     *
     * @param values
     * @return T or <code>null</code> for null/empty array
     */
    public static <T> T getFirst(T[] values) {

	T value;
	if (valid(values)) {
	    value = values[FIRST_INDEX];
	} else {
	    value = null;
	}

	return value;
    }
}
package org.mcupdater.util;

import org.apache.commons.codec.language.Soundex;
import org.apache.commons.lang3.StringUtils;
import org.mcupdater.api.Version;
import org.mcupdater.model.*;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;

import java.io.BufferedWriter;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
import java.util.*;

/**
 * In-memory representation of an MCUpdater server pack: the server entry, its
 * imports, modules and not-yet-assigned config files. Knows how to serialize
 * itself to ServerPack XML and how to heuristically assign loose config files
 * to the module they most likely belong to.
 */
public class ServerDefinition {
	/** Known mismatches between a mod's internal id and its display/file name. */
	public final Map<String, String> modExceptions = new HashMap<>();
	/** Known mismatches between a config path fragment and the owning mod id (value is a regex). */
	public final Map<String, String> configExceptions = new HashMap<>();
	public static boolean hasLitemods = false;
	private ServerList entry;
	private List<Import> imports;
	private Map<String, Module> modules;
	private List<ConfigFile> tempConfigs;

	public ServerDefinition() {
		this.entry = new ServerList();
		this.imports = new ArrayList<>();
		this.modules = new HashMap<>();
		this.tempConfigs = new ArrayList<>();
		initExceptions();
	}

	/** Seeds the exception maps with hard-won, hand-curated mod/config name oddities. */
	private void initExceptions() {
		modExceptions.put("NotEnoughItems", "NEI");
		modExceptions.put("AWWayofTime", "BloodMagic");
		modExceptions.put("WR-CBE|Core", "WirelessRedstone");
		modExceptions.put("TConstruct", "TinkersWorkshop");
		modExceptions.put("inventorytweaks", "InvTweaks");
		modExceptions.put("ProjRed|Core", "ProjectRed");
		configExceptions.put("AWWayofTime", "BloodMagic");
		configExceptions.put("microblocks", "ForgeMultipart");
		configExceptions.put("cofh/world", "CoFHCore");
		configExceptions.put("cofh/Lexicon-Whitelist", "CoFHCore");
		configExceptions.put("hqm", "HardcoreQuesting");
		configExceptions.put("forgeChunkLoading", "forge-\\d+.\\d+.\\d+.\\d+");
		configExceptions.put("forge.cfg", "forge-\\d+.\\d+.\\d+.\\d+");
		configExceptions.put("splash.properties", "forge-\\d+.\\d+.\\d+.\\d+");
		configExceptions.put("scripts", "MineTweaker3");
		configExceptions.put(".zs", "MineTweaker3");
		configExceptions.put("resources", "ResourceLoader");
		configExceptions.put("advRocketry", "advancedRocketry");
		configExceptions.put("AppliedEnergistics2", "appliedenergistics2");
		configExceptions.put("brandon3055", "brandonscore");
		configExceptions.put("Extreme Reactors", "bigreactors");
		configExceptions.put("Tiny Progressions", "tp");
		configExceptions.put("DEPSAMarker.txt", "draconicevolution");
		configExceptions.put("WirelessCraftingTerminal.cfg", "wct");
	}

	/**
	 * Writes the complete ServerPack XML for this definition to {@code outputFile}.
	 *
	 * @param stylesheet    optional xml-stylesheet href; empty for none
	 * @param outputFile    destination path (created/truncated)
	 * @param moduleList    modules to serialize, already sorted
	 * @param onlyOverrides when true, modules are written as bare Override stubs
	 */
	public void writeServerPack(String stylesheet, Path outputFile, List<Module> moduleList, Boolean onlyOverrides) {
		try {
			// Auto-inject LiteLoader when litemods were seen but no liteloader module
			// exists, then re-sort so load ordering stays valid.
			if (hasLitemods && !hasMod(moduleList, "liteloader")) {
				moduleList.add(new Module("LiteLoader", "liteloader", Arrays.asList(new PrioritizedURL("http://dl.liteloader.com/versions/com/mumfrey/liteloader/" + this.getServerEntry().getVersion() + "/liteloader-" + this.getServerEntry().getVersion() + ".jar", 0)), null, "", false, ModType.Library, 100, false, false, true, "", null, "CLIENT", "", null, "--tweakClass com.mumfrey.liteloader.launch.LiteLoaderTweaker", "", null, ""));
				moduleList = MCUpdater.getInstance().sortMods(moduleList);
			}
			// FIX: try-with-resources — the writer was previously leaked when any
			// generate* call threw, since close() was only reached on success.
			try (BufferedWriter fileWriter = Files.newBufferedWriter(outputFile, StandardCharsets.UTF_8, StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.WRITE)) {
				generateServerPackHeaderXML(stylesheet, fileWriter);
				generateServerHeaderXML(this.getServerEntry(), fileWriter);
				generateServerDetailXML(fileWriter, this.imports, moduleList, onlyOverrides);
				generateServerFooterXML(fileWriter);
				generateServerPackFooterXML(fileWriter);
			}
		} catch (IOException e) {
			e.printStackTrace();
		}
	}

	/** @return true when {@code moduleList} already contains a module with id {@code modId}. */
	public boolean hasMod(List<Module> moduleList, String modId) {
		for (Module entry : moduleList) {
			if (entry.getId().equals(modId)) {
				return true;
			}
		}
		return false;
	}

	/** Writes the opening {@code <Server ...>} tag; optional attributes are omitted when empty. */
	public static void generateServerHeaderXML(Server server, BufferedWriter writer) throws IOException {
		writer.write("\t<Server id=\"" + xmlEscape(server.getServerId()) +
				"\" abstract=\"" + Boolean.toString(server.isFakeServer()) +
				"\" name=\"" + xmlEscape(server.getName()) +
				(server.getNewsUrl().isEmpty() ? "" : ("\" newsUrl=\"" + xmlEscape(server.getNewsUrl()))) +
				(server.getIconUrl().isEmpty() ? "" : ("\" iconUrl=\"" + xmlEscape(server.getIconUrl()))) +
				"\" version=\"" + xmlEscape(server.getVersion()) +
				(server.getAddress().isEmpty() ? "" : ("\" serverAddress=\"" + xmlEscape(server.getAddress()))) +
				"\" generateList=\"" + Boolean.toString(server.isGenerateList()) +
				"\" autoConnect=\"" + Boolean.toString(server.isAutoConnect()) +
				"\" revision=\"" + xmlEscape(server.getRevision()) +
				"\" mainClass=\"" + xmlEscape(server.getMainClass()) +
				"\" launcherType=\"" + server.getLauncherType() +
				(server.getLibOverrides().size() == 0 ? "" : ("\" libOverrides=\"" + StringUtils.join(server.getLibOverrides().values(), " "))) +
				(server.getServerClass_Raw().isEmpty() ? "" : ("\" serverClass=\"" + server.getServerClass_Raw())) +
				"\">");
		writer.newLine();
	}

	/** Writes the XML prolog, optional stylesheet PI, and opening {@code <ServerPack>} tag. */
	public static void generateServerPackHeaderXML(String stylesheet, BufferedWriter writer) throws IOException {
		writer.write("<?xml version=\"1.0\" encoding=\"UTF-8\"?>");
		writer.newLine();
		if (!stylesheet.isEmpty()) {
			writer.write("<?xml-stylesheet href=\"" + xmlEscape(stylesheet) + "\" type=\"text/xsl\" ?>");
			writer.newLine();
		}
		// NOTE(review): the xmlns URL was truncated to "http:" in the source we
		// received; reconstructed as the MCUpdater site root — verify against the
		// published ServerPack schema location before release.
		writer.write("<ServerPack version=\"" + Version.API_VERSION + "\" xmlns=\"http://www.mcupdater.com\">");
		writer.newLine();
	}

	/**
	 * Writes all {@code <Import>} and {@code <Module>} elements.
	 * When {@code onlyOverrides} is true, each module is emitted as a bare
	 * Required/ModType-Override stub with no URLs or submodules.
	 */
	public static void generateServerDetailXML(BufferedWriter writer, List<Import> importsList, List<Module> moduleList, Boolean onlyOverrides) throws IOException {
		for (Import importEntry : importsList) {
			writer.write("\t\t<Import" + (importEntry.getUrl().isEmpty() ? ">" : (" url=\"" + xmlEscape(importEntry.getUrl())) + "\">") + importEntry.getServerId() + "</Import>");
			writer.newLine();
		}
		for (Module moduleEntry : moduleList) {
			writer.write("\t\t<Module name=\"" + xmlEscape(moduleEntry.getName()) + "\" id=\"" + moduleEntry.getId() + "\" depends=\"" + moduleEntry.getDepends() + "\" side=\"" + moduleEntry.getSide() + "\">");
			writer.newLine();
			if (!onlyOverrides) {
				for (PrioritizedURL url : moduleEntry.getPrioritizedUrls()) {
					writer.write("\t\t\t<URL priority=\"" + url.getPriority() + "\">" + xmlEscape(url.getUrl()) + "</URL>");
					writer.newLine();
				}
				if (moduleEntry.getCurseProject() != null) {
					// NOTE(review): module uses 0 as the "no file" sentinel while the
					// submodule branch uses -1 — confirm which sentinel CurseProject uses.
					writer.write("\t\t\t<Curse" +
							" project=\"" + moduleEntry.getCurseProject().getProject() + "\"" +
							(moduleEntry.getCurseProject().getFile() != 0 ? (" file=\"" + Integer.toString(moduleEntry.getCurseProject().getFile()) + "\"") : "") +
							" type=\"" + moduleEntry.getCurseProject().getReleaseType().toString() + "\"" +
							" autoupgrade=\"" + Boolean.toString(moduleEntry.getCurseProject().getAutoUpgrade()) + "\"/>");
					writer.newLine();
				}
				if (!moduleEntry.getLoadPrefix().isEmpty()) {
					writer.write("\t\t\t<LoadPrefix>" + xmlEscape(moduleEntry.getLoadPrefix()) + "</LoadPrefix>");
					writer.newLine();
				}
				if (!moduleEntry.getPath().isEmpty()) {
					writer.write("\t\t\t<ModPath>" + xmlEscape(moduleEntry.getPath()) + "</ModPath>");
					writer.newLine();
				}
				writer.write("\t\t\t<Size>" + Long.toString(moduleEntry.getFilesize()) + "</Size>");
				writer.newLine();
				writer.write("\t\t\t<Required");
				if (!moduleEntry.getRequired() && moduleEntry.getIsDefault()) {
					writer.write(" isDefault=\"true\"");
				}
				writer.write(">" + (moduleEntry.getRequired() ? "true" : "false") + "</Required>");
				writer.newLine();
				writer.write("\t\t\t<ModType");
				if (moduleEntry.getInRoot()) {
					writer.write(" inRoot=\"true\"");
				}
				if (moduleEntry.getJarOrder() > 0 && moduleEntry.getModType().equals(ModType.Jar)) {
					writer.write(" order=\"" + moduleEntry.getJarOrder() + "\"");
				}
				if (moduleEntry.getKeepMeta()) {
					writer.write(" keepMeta=\"true\"");
				}
				if (!moduleEntry.getLaunchArgs().isEmpty()) {
					writer.write(" launchArgs=\"" + xmlEscape(moduleEntry.getLaunchArgs()) + "\"");
				}
				if (!moduleEntry.getJreArgs().isEmpty()) {
					// FIX: attribute value was emitted without its closing quote,
					// producing malformed XML whenever jreArgs was set.
					writer.write(" jreArgs=\"" + xmlEscape(moduleEntry.getJreArgs()) + "\"");
				}
				writer.write(">" + moduleEntry.getModType().toString() + "</ModType>");
				writer.newLine();
				if (!moduleEntry.getMD5().isEmpty()) {
					writer.write("\t\t\t<MD5>" + moduleEntry.getMD5() + "</MD5>");
					writer.newLine();
				}
				if (moduleEntry.getMeta().size() > 0) {
					writer.write("\t\t\t<Meta>");
					writer.newLine();
					for (Map.Entry<String, String> metaEntry : moduleEntry.getMeta().entrySet()) {
						writer.write("\t\t\t\t<" + xmlEscape(metaEntry.getKey()) + ">" + xmlEscape((metaEntry.getValue() == null ? "" : metaEntry.getValue())) + "</" + xmlEscape(metaEntry.getKey()) + ">");
						writer.newLine();
					}
					writer.write("\t\t\t</Meta>");
					writer.newLine();
				}
				for (GenericModule submodule : moduleEntry.getSubmodules()) {
					writer.write("\t\t\t<Submodule name=\"" + xmlEscape(submodule.getName()) + "\" id=\"" + submodule.getId() + "\" depends=\"" + submodule.getDepends() + "\" side=\"" + submodule.getSide() + "\">");
					writer.newLine();
					for (PrioritizedURL url : submodule.getPrioritizedUrls()) {
						writer.write("\t\t\t\t<URL priority=\"" + url.getPriority() + "\">" + xmlEscape(url.getUrl()) + "</URL>");
						writer.newLine();
					}
					if (submodule.getCurseProject() != null) {
						writer.write("\t\t\t\t<Curse" +
								" project=\"" + submodule.getCurseProject().getProject() + "\"" +
								(submodule.getCurseProject().getFile() != -1 ? " file=\"" + Integer.toString(submodule.getCurseProject().getFile()) + "\"" : "") +
								" type=\"" + submodule.getCurseProject().getReleaseType().toString() + "\"" +
								" autoupgrade=\"" + Boolean.toString(submodule.getCurseProject().getAutoUpgrade()) + "\"/>");
						writer.newLine();
					}
					if (!submodule.getLoadPrefix().isEmpty()) {
						writer.write("\t\t\t\t<LoadPrefix>" + xmlEscape(submodule.getLoadPrefix()) + "</LoadPrefix>");
						writer.newLine();
					}
					if (!submodule.getPath().isEmpty()) {
						writer.write("\t\t\t\t<ModPath>" + xmlEscape(submodule.getPath()) + "</ModPath>");
						writer.newLine();
					}
					writer.write("\t\t\t\t<Size>" + Long.toString(submodule.getFilesize()) + "</Size>");
					writer.newLine();
					writer.write("\t\t\t\t<Required");
					if (!submodule.getRequired() && submodule.getIsDefault()) {
						writer.write(" isDefault=\"true\"");
					}
					writer.write(">" + (submodule.getRequired() ? "true" : "false") + "</Required>");
					writer.newLine();
					// FIX: was written without the "\t\t\t\t" indent, breaking the
					// otherwise-consistent pretty-printing of submodule elements.
					writer.write("\t\t\t\t<ModType");
					if (submodule.getInRoot()) {
						writer.write(" inRoot=\"true\"");
					}
					if (submodule.getJarOrder() > 0 && submodule.getModType().equals(ModType.Jar)) {
						writer.write(" order=\"" + submodule.getJarOrder() + "\"");
					}
					if (submodule.getKeepMeta()) {
						writer.write(" keepMeta=\"true\"");
					}
					if (!submodule.getLaunchArgs().isEmpty()) {
						writer.write(" launchArgs=\"" + xmlEscape(submodule.getLaunchArgs()) + "\"");
					}
					if (!submodule.getJreArgs().isEmpty()) {
						// FIX: same missing closing quote as the module branch.
						writer.write(" jreArgs=\"" + xmlEscape(submodule.getJreArgs()) + "\"");
					}
					writer.write(">" + submodule.getModType().toString() + "</ModType>");
					writer.newLine();
					writer.write("\t\t\t\t<MD5>" + submodule.getMD5() + "</MD5>");
					writer.newLine();
					if (submodule.getMeta().size() > 0) {
						writer.write("\t\t\t\t<Meta>");
						writer.newLine();
						for (Map.Entry<String, String> metaEntry : submodule.getMeta().entrySet()) {
							// FIX: null-guard the value, consistent with the module-level
							// Meta loop above (previously NPE'd inside xmlEscape on null).
							writer.write("\t\t\t\t\t<" + xmlEscape(metaEntry.getKey()) + ">" + xmlEscape((metaEntry.getValue() == null ? "" : metaEntry.getValue())) + "</" + xmlEscape(metaEntry.getKey()) + ">");
							writer.newLine();
						}
						writer.write("\t\t\t\t</Meta>");
						writer.newLine();
					}
					writer.write("\t\t\t</Submodule>");
					writer.newLine();
				}
			} else {
				writer.write("\t\t\t<Required");
				if (!moduleEntry.getRequired() && moduleEntry.getIsDefault()) {
					writer.write(" isDefault=\"true\"");
				}
				writer.write(">" + (moduleEntry.getRequired() ? "true" : "false") + "</Required>");
				writer.newLine();
				writer.write("\t\t\t<ModType>Override</ModType>");
				writer.newLine();
			}
			for (ConfigFile config : moduleEntry.getConfigs()) {
				writer.write("\t\t\t<ConfigFile>");
				writer.newLine();
				for (PrioritizedURL url : config.getPrioritizedUrls()) {
					writer.write("\t\t\t\t<URL priority=\"" + url.getPriority() + "\">" + xmlEscape(url.getUrl()) + "</URL>");
					writer.newLine();
				}
				writer.write("\t\t\t\t<Path>" + xmlEscape(config.getPath()) + "</Path>");
				writer.newLine();
				writer.write("\t\t\t\t<NoOverwrite>" + config.isNoOverwrite() + "</NoOverwrite>");
				writer.newLine();
				writer.write("\t\t\t\t<MD5>" + xmlEscape(config.getMD5()) + "</MD5>");
				writer.newLine();
				writer.write("\t\t\t</ConfigFile>");
				writer.newLine();
			}
			writer.write("\t\t</Module>");
			writer.newLine();
		}
	}

	public static void generateServerFooterXML(BufferedWriter writer) throws IOException {
		writer.write("\t</Server>");
		writer.newLine();
	}

	public static void generateServerPackFooterXML(BufferedWriter writer) throws IOException {
		writer.write("</ServerPack>");
		writer.newLine();
	}

	/**
	 * Escapes the five XML special characters. The ampersand is replaced first
	 * so already-escaped entities are not double-mangled in reverse.
	 */
	private static String xmlEscape(String input) {
		String result;
		try {
			result = input.replace("&", "&amp;").replace("\"", "&quot;").replace("'", "&apos;").replace("<", "&lt;").replace(">", "&gt;");
		} catch (Exception e) {
			result = "!!!! Error !!!!";
			System.out.println(input);
			e.printStackTrace();
		}
		return result;
	}

	public void addImport(Import newImport) {
		this.imports.add(newImport);
	}

	public void addConfig(ConfigFile newConfig) {
		tempConfigs.add(newConfig);
	}

	/** Registers a module, warning (to stdout) when an existing id is overwritten. */
	public void addModule(Module newMod) {
		if (modules.containsKey(newMod.getId())) {
			System.out.println("Warning: ModID: " + newMod.getId() + " belonging to " + newMod.getName() + " already exists in the list, and is being overwritten.");
		}
		modules.put(newMod.getId(), newMod);
	}

	public void setServerEntry(ServerList newEntry) {
		this.entry = newEntry;
	}

	public ServerList getServerEntry() {
		return entry;
	}

	public List<Import> getImports() {
		return imports;
	}

	/**
	 * Heuristically assigns each collected config file to the best-matching
	 * module using Levenshtein distance, path/name containment, the
	 * configExceptions table, and Soundex. Low-confidence matches are reported
	 * via {@code issues} (path -> assigned module name).
	 *
	 * @param issues out-parameter collecting questionable assignments
	 * @param debug  when true, prints every candidate's score
	 */
	public void assignConfigs(Map<String, String> issues, boolean debug) {
		System.out.println("Assigning configs to mods\n===============");
		//this.modules.get(0).setConfigs(tempConfigs);
		Soundex snd = new Soundex();
		int distance;
		for (ConfigFile config : tempConfigs) {
			int potential = 0;
			System.out.println(config.getPath() + ":");
			Module tempModule = null;
			distance = 10000;
			// Strip the leading directory and trailing extension to get a comparable name.
			String configName = config.getPath().substring(config.getPath().indexOf("/"), config.getPath().contains(".") ? config.getPath().lastIndexOf(".") : config.getPath().length());
			for (Module mod : modules.values()) {
				try {
					int newDistance = StringUtils.getLevenshteinDistance(configName, mod.getId());
					// Hand-curated exceptions trump the raw edit distance.
					for (Map.Entry<String, String> exception : configExceptions.entrySet()) {
						if (config.getPath().contains(exception.getKey()) && mod.getId().matches(exception.getValue())) {
							newDistance -= 20;
						}
					}
					if (Arrays.asList(config.getPath().toLowerCase().split("/")).contains(mod.getId().toLowerCase())) {
						newDistance -= 20;
					}
					if (configName.toLowerCase().contains(mod.getId().toLowerCase())) {
						newDistance -= 10;
					}
					if (configName.toLowerCase().contains(mod.getId().toLowerCase().substring(0, mod.getId().length() < 3 ? mod.getId().length() : 3))) {
						newDistance -= 1;
					}
					if (snd.soundex(mod.getId()).equals(snd.soundex(configName))) {
						newDistance -= 10;
					} else if (snd.soundex(mod.getName()).equals(snd.soundex(configName))) {
						newDistance -= 10;
					}
					if (newDistance <= 5 || debug) {
						System.out.println(" >" + mod.getId() + " - " + newDistance + " (potential)");
						potential++;
					}
					if (newDistance < distance) {
						tempModule = mod;
						distance = newDistance;
					}
				} catch (Exception e) {
					System.out.println("Problem with Mod " + mod.getName() + " (" + mod.getId() + ") and config " + config.getPath() + " (" + configName + ")");
					e.printStackTrace();
				}
			}
			if (tempModule != null) {
				System.out.println(config.getPath() + ": " + tempModule.getName() + " (" + distance + ")\n");
				if (tempModule.getSide().equals(ModSide.CLIENT)) {
					config.setNoOverwrite(true);
				}
				// Ambiguous (several candidates) or plain weak matches get flagged.
				if ((distance > 5 && potential > 1) || distance > 10) {
					issues.put(config.getPath(), tempModule.getName());
				}
				modules.get(tempModule.getId()).getConfigs().add(config);
			} else {
				System.out.println(config.getPath() + " could not be assigned to a module!");
			}
		}
	}

	public Map<String, Module> getModules() {
		return modules;
	}

	public List<Module> sortMods() {
		return MCUpdater.getInstance().sortMods(new ArrayList<>(modules.values()));
	}

	/** Adds the Forge import plus a placeholder Override module for it. */
	public void addForge(String mcVersion, String forgeVersion) {
		this.addImport(new Import("http://files.mcupdater.com/example/forge.php?mc=" + mcVersion + "&forge=" + forgeVersion, "forge"));
		this.addModule(new Module("Minecraft Forge", "forge-" + forgeVersion, new ArrayList<PrioritizedURL>(), null, "", true, ModType.Override, 0, false, false, true, "", new ArrayList<ConfigFile>(), "BOTH", "", new HashMap<String, String>(), "", "", new ArrayList<Submodule>(), ""));
	}

	/**
	 * Adds the Fabric import and main class. When {@code yarnVersion} is
	 * "latest", the yarn build matching {@code mcVersion} is resolved by
	 * scanning Fabric's maven-metadata.xml.
	 */
	public void addFabric(String mcVersion, String fabricVersion, String yarnVersion) {
		final String baseUrl = "https://fabricmc.net/download/mcupdater/";
		final String fabricMainClass = "net.fabricmc.loader.launch.knot.KnotClient";
		// if yarn version is unspecified, we need to look this up
		if ( yarnVersion.equals("latest") ) {
			final String mavenUrl = "https://maven.fabricmc.net/net/fabricmc/yarn/maven-metadata.xml";
			System.out.println("Scanning "+mavenUrl+" for yarn version...");
			try {
				final Document xml = ServerPackParser.readXmlFromUrl(mavenUrl);
				final String yarnPrefix = mcVersion+".";
				String foundVersion = null;
				// NB: I despise sifting through random xml, so we're doing it quick and ugly
				NodeList tmp = xml.getElementsByTagName("versioning");
				for( int i = 0; i < tmp.getLength(); ++i ) {
					NodeList children = tmp.item(i).getChildNodes();
					for( int j = 0; j < children.getLength(); ++j ) {
						Node child = children.item(j);
						if( child.getNodeName().equals("release") ) {
							String version = child.getTextContent();
							if( version.startsWith(yarnPrefix) ) {
								// our snapshot matches the current release, use that
								System.out.println("Current yarn release is "+version+", using");
								foundVersion = version;
							}
						} else if( child.getNodeName().equals("versions") ) {
							// we're here, start digging
							NodeList versions = child.getChildNodes();
							// scan backwards, the list should be sorted ascending
							// FIX: include index 0 (loop previously stopped at k > 0,
							// silently skipping the first/oldest entry).
							for( int k = versions.getLength()-1; k >= 0; --k ) {
								final Node v = versions.item(k);
								String version = v.getTextContent();
								if( version.startsWith(yarnPrefix) ) {
									// we found one, use it
									System.out.println("Found yarn build "+version+", using");
									foundVersion = version;
									break;
								}
							}
						}
						// export the version we found and go
						if( foundVersion != null ) {
							yarnVersion = foundVersion;
							break;
						}
					}
				}
			} catch( Exception e ) {
				System.out.println("Failed to parse yarn maven metadata xml, please specify yarn version manually.");
			}
		}
		this.addImport(new Import(baseUrl + "?yarn=" + yarnVersion + "&loader=" + fabricVersion, "fabric"));
		this.getServerEntry().setMainClass(fabricMainClass);
	}
}
package org.oregami.data;

import com.google.inject.Inject;
import com.google.inject.Provider;
import com.google.inject.persist.Transactional;
import org.hibernate.envers.AuditReader;
import org.hibernate.envers.AuditReaderFactory;
import org.joda.time.LocalDateTime;
import org.oregami.entities.BaseEntityUUID;
import org.oregami.entities.CustomRevisionEntity;
import org.oregami.entities.CustomRevisionListener;
import org.oregami.entities.TopLevelEntity;
import org.oregami.service.ServiceCallContext;

import javax.persistence.EntityManager;
import javax.persistence.EntityTransaction;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.List;

/**
 * Generic JPA DAO for entities keyed by UUID, with Hibernate Envers audit
 * support. Persist/merge operations stamp the entity's change time and, for
 * {@link TopLevelEntity}-annotated types, propagate discriminator/id into the
 * current revision-listener context.
 *
 * @param <E> concrete entity type
 * @param <P> primary-key type
 */
public abstract class GenericDAOUUIDImpl<E extends BaseEntityUUID, P> implements GenericDAOUUID<E, P> {

	private final Provider<EntityManager> entityManagerProvider;

	// Cached result of getEntityClass(); resolved lazily via reflection.
	Class<E> entityClass;

	@Inject
	public GenericDAOUUIDImpl(Provider<EntityManager> emf) {
		this.entityManagerProvider = emf;
	}

	/**
	 * Persists a new entity, stamping its change time and updating the
	 * revision-listener context.
	 *
	 * @return the generated/assigned primary key
	 */
	@Override
	@Transactional
	@SuppressWarnings("unchecked")
	public P save(E entity) {
		entity.setChangeTime(new LocalDateTime());
		entityManagerProvider.get().persist(entity);
		updateRevisionListener(entity);
		return (P) entity.getId();
	}

	/** Loads the entity with the given primary key, or null when absent. */
	@Override
	public E findOne(P id) {
		return getEntityManager().find(getEntityClass(), id);
	}

	/** Merges pending changes, stamping the change time first. */
	@Override
	@Transactional
	public void update(E entity) {
		updateRevisionListener(entity);
		entity.setChangeTime(new LocalDateTime());
		getEntityManager().merge(entity);
	}

	@Override
	@Transactional
	public void delete(E entity) {
		getEntityManager().remove(entity);
	}

	@Override
	public EntityManager getEntityManager() {
		return entityManagerProvider.get();
	}

	/**
	 * Resolves (and caches) the concrete entity class from the first generic
	 * type argument of the instantiated subclass.
	 *
	 * @throws IllegalArgumentException when the superclass is not parameterized
	 */
	@Override
	@SuppressWarnings("unchecked")
	public Class<E> getEntityClass() {
		if (entityClass != null) {
			return entityClass;
		}
		Type superclass = getClass().getGenericSuperclass();
		if (!(superclass instanceof ParameterizedType)) {
			throw new IllegalArgumentException(
					"Could not guess entity class by reflection");
		}
		entityClass = (Class<E>) ((ParameterizedType) superclass).getActualTypeArguments()[0];
		return entityClass;
	}

	/** Runs the entity's "<SimpleName>.GetAll" named query. */
	@SuppressWarnings("unchecked")
	@Override
	public List<E> findAll() {
		String queryName = getEntityClass().getSimpleName() + ".GetAll";
		return getEntityManager().createNamedQuery(queryName).getResultList();
	}

	public EntityTransaction getTransaction() {
		return getEntityManager().getTransaction();
	}

	/**
	 * For {@link TopLevelEntity}-annotated entities, copies the discriminator
	 * and entity id into the thread's revision-listener context (if any).
	 */
	protected void updateRevisionListener(BaseEntityUUID entity) {
		if (!entity.getClass().isAnnotationPresent(TopLevelEntity.class)) {
			return;
		}
		ServiceCallContext context = CustomRevisionListener.context.get();
		if (context == null) {
			return;
		}
		context.setEntityDiscriminator(entity.getClass().getAnnotation(TopLevelEntity.class).discriminator());
		context.setEntityId(entity.getId());
	}

	/** Lists all Envers revisions recorded for the entity with the given id. */
	public List<RevisionInfo> findRevisions(String id) {
		AuditReader auditReader = AuditReaderFactory.get(getEntityManager());
		List<RevisionInfo> history = new ArrayList<>();
		for (Number revisionNumber : auditReader.getRevisions(getEntityClass(), id)) {
			CustomRevisionEntity revisionEntity =
					auditReader.findRevision(CustomRevisionEntity.class, revisionNumber);
			history.add(new RevisionInfo(revisionNumber, revisionEntity));
		}
		return history;
	}

	/**
	 * Loads the entity as of a specific revision, or null when that revision
	 * does not exist for this entity.
	 */
	public E findRevision(String id, Number revision) {
		AuditReader auditReader = AuditReaderFactory.get(getEntityManager());
		List<Number> knownRevisions = auditReader.getRevisions(getEntityClass(), id);
		if (!knownRevisions.contains(revision)) {
			return null;
		}
		return auditReader.find(getEntityClass(), id, revision);
	}
}
package org.scijava.menu;

import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;

import org.scijava.UIDetails;
import org.scijava.event.EventHandler;
import org.scijava.event.EventService;
import org.scijava.module.ModuleInfo;
import org.scijava.module.ModuleService;
import org.scijava.module.event.ModulesAddedEvent;
import org.scijava.module.event.ModulesRemovedEvent;
import org.scijava.module.event.ModulesUpdatedEvent;
import org.scijava.plugin.Parameter;
import org.scijava.plugin.Plugin;
import org.scijava.service.AbstractService;
import org.scijava.service.Service;

/**
 * Default service for keeping track of the application's menu structure.
 *
 * @author Curtis Rueden
 * @see ShadowMenu
 */
@Plugin(type = Service.class)
public class DefaultMenuService extends AbstractService implements MenuService {

	@Parameter
	private EventService eventService;

	@Parameter
	private ModuleService moduleService;

	/** Menu tree structures. There is one structure per menu root. */
	private HashMap<String, ShadowMenu> rootMenus;

	// -- MenuService methods --

	@Override
	public ShadowMenu getMenu() {
		return getMenu(UIDetails.APPLICATION_MENU_ROOT);
	}

	@Override
	public ShadowMenu getMenu(final String menuRoot) {
		return rootMenus().get(menuRoot);
	}

	@Override
	public <T> T createMenus(final MenuCreator<T> creator, final T menu) {
		return createMenus(UIDetails.APPLICATION_MENU_ROOT, creator, menu);
	}

	@Override
	public <T> T createMenus(final String menuRoot, final MenuCreator<T> creator,
		final T menu)
	{
		creator.createMenus(getMenu(menuRoot), menu);
		return menu;
	}

	// -- Event handlers --

	@EventHandler
	protected void onEvent(final ModulesAddedEvent event) {
		if (rootMenus == null) {
			// add *all* known modules, which includes the ones given here
			rootMenus();
			return;
		}
		// data structure already exists; add *these* modules only
		addModules(event.getItems());
	}

	@EventHandler
	protected void onEvent(final ModulesRemovedEvent event) {
		for (final ShadowMenu menu : rootMenus().values()) {
			menu.removeAll(event.getItems());
		}
	}

	@EventHandler
	protected void onEvent(final ModulesUpdatedEvent event) {
		for (final ShadowMenu menu : rootMenus().values()) {
			menu.updateAll(event.getItems());
		}
	}

	// -- Helper methods --

	/**
	 * Adds the given collection of modules to the menu data structure.
	 * <p>
	 * The menu data structure is created lazily via {@link #rootMenus()} if it
	 * does not already exist. Note that this may result in a recursive call to
	 * this method to populate the menus with the collection of modules currently
	 * known by the {@link ModuleService}.
	 * </p>
	 */
	private synchronized void addModules(final Collection<ModuleInfo> items) {
		// categorize modules by menu root
		final HashMap<String, ArrayList<ModuleInfo>> modulesByMenuRoot =
			new HashMap<String, ArrayList<ModuleInfo>>();
		for (final ModuleInfo info : items) {
			final String menuRoot = info.getMenuRoot();
			ArrayList<ModuleInfo> modules = modulesByMenuRoot.get(menuRoot);
			if (modules == null) {
				modules = new ArrayList<ModuleInfo>();
				modulesByMenuRoot.put(menuRoot, modules);
			}
			modules.add(info);
		}

		// process each menu root separately
		for (final String menuRoot : modulesByMenuRoot.keySet()) {
			final ArrayList<ModuleInfo> modules = modulesByMenuRoot.get(menuRoot);
			ShadowMenu menu = rootMenus().get(menuRoot);
			if (menu == null) {
				// new menu root: create new menu structure
				menu = new ShadowMenu(getContext(), modules);
				rootMenus().put(menuRoot, menu);
			}
			else {
				// existing menu root: add to menu structure
				menu.addAll(modules);
			}
		}
	}

	/**
	 * Lazily creates the {@link #rootMenus} data structure.
	 * <p>
	 * Note that the data structure is initially populated with all modules
	 * available from the {@link ModuleService}, which is accomplished via a call
	 * to {@link #addModules(Collection)}, which calls {@link #rootMenus()}, which
	 * can result in a level of recursion. This is intended.
	 * </p>
	 */
	private HashMap<String, ShadowMenu> rootMenus() {
		if (rootMenus == null) initRootMenus();
		return rootMenus;
	}

	/** Initializes {@link #rootMenus}. */
	private synchronized void initRootMenus() {
		if (rootMenus != null) return;
		final HashMap<String, ShadowMenu> map = new HashMap<String, ShadowMenu>();
		// FIX: publish the (still empty) map BEFORE populating it. The previous
		// code assigned rootMenus only after addModules(...), but addModules calls
		// rootMenus(), which re-enters initRootMenus() while rootMenus is still
		// null — unbounded recursion (StackOverflowError) whenever any modules
		// exist. With the field set first, the nested rootMenus() call returns the
		// map and the intended single level of recursion terminates.
		rootMenus = map;
		final List<ModuleInfo> allModules = moduleService.getModules();
		addModules(allModules);
	}

}
package org.sports.ontology;

import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Locale;
import java.util.TimeZone;

import org.sports.ontology.model.DocumentModel;
import org.sports.ontology.model.OntologyResult;
import org.sports.ontology.model.PersonQuotes;
import org.sports.ontology.model.ResultRelation;

import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.rdf.model.RDFNode;
import com.hp.hpl.jena.rdf.model.Resource;
import com.hp.hpl.jena.rdf.model.SimpleSelector;
import com.hp.hpl.jena.rdf.model.Statement;
import com.hp.hpl.jena.rdf.model.StmtIterator;
import com.hp.hpl.jena.util.FileManager;

/**
 * Thin wrapper around a Jena RDF model for storing and querying sports
 * documents, the quotes they contain, and event results.
 */
public class OntologyHandler {

	// single RDF model backing all operations of this handler
	private final Model model;

	public OntologyHandler() {
		this.model = ModelFactory.createDefaultModel();
		model.setNsPrefix("sports", SportsOntology.getURI());
	}

	/**
	 * Parses a date in java.util.Date.toString() format,
	 * e.g. "Wed Mar 05 10:00:00 UTC 2014", interpreted in UTC.
	 *
	 * @throws ParseException when the string does not match the pattern
	 */
	private Date decodeDate(String dateStr) throws ParseException {
		TimeZone tz = TimeZone.getTimeZone("UTC");
		// FIX: pin an English locale — the "EEE MMM" pattern only matches
		// Date.toString() output when month/day names are parsed in English.
		// SimpleDateFormat is not thread-safe, hence a fresh instance per call.
		SimpleDateFormat df = new SimpleDateFormat("EEE MMM dd HH:mm:ss z yyyy", Locale.US);
		df.setTimeZone(tz);
		return df.parse(dateStr);
	}

	/**
	 * Loads RDF/XML from the named file into the model; silently does nothing
	 * when the file cannot be found by the FileManager.
	 */
	public void open(String inputFileName) {
		// use the FileManager to find the input file
		InputStream in = FileManager.get().open(inputFileName);
		if (in != null) {
			// read the RDF/XML file; a missing file is deliberately ignored
			model.read(in, null);
		}
	}

	/** Attaches each quote of {@code quotes} to {@code document} as an anonymous quote resource. */
	public void addPersonQuote(PersonQuotes quotes, Resource document) {
		String person = quotes.getPerson();
		for (String quote : quotes.getQuotes()) {
			document.addProperty(
					SportsOntology.QUOTE,
					model.createResource()
							.addProperty(SportsOntology.QUOTEDTEXT, quote)
							.addProperty(SportsOntology.PERSONNAME, person));
		}
	}

	/** Attaches an event result and its competitors to {@code document}. */
	public void addResultRelation(ResultRelation resultRelation,
			Resource document) {
		List<String> competitors = resultRelation.getCompetitors();
		document.addProperty(SportsOntology.EVENT, model.createResource()
				.addProperty(SportsOntology.RESULT, resultRelation.getResult()));
		for (String competitor : competitors) {
			document.addProperty(SportsOntology.COMPETITOR, competitor);
		}
	}

	/** Registers a document (keyed by its URL) in the model and returns its resource. */
	public Resource registerDocument(DocumentModel docModel) {
		Resource document = model
				.createResource(docModel.getUrl())
				.addProperty(SportsOntology.DOCUMENT, docModel.getKey())
				.addProperty(SportsOntology.DATE, docModel.getDate().toString());
		return document;
	}

	/**
	 * Finds quotes, optionally filtered by (case-insensitive) person name and
	 * by an inclusive [afterDate, beforeDate] document-date window. Null/empty
	 * filters are ignored.
	 *
	 * @return one PersonQuotes per matching quote statement
	 */
	public List<PersonQuotes> getQuotes(final String personName,
			final Date afterDate, final Date beforeDate) {
		List<PersonQuotes> result = new ArrayList<PersonQuotes>();
		StmtIterator iter = model.listStatements(new SimpleSelector(null,
				SportsOntology.QUOTE, (RDFNode) null) {
			@Override
			public boolean selects(Statement s) {
				boolean found = true;
				// FIX: was `personName != ""` — a reference-identity check that is
				// true for any non-interned empty string; compare content instead.
				if (found && personName != null && !personName.isEmpty()) {
					String ontoPerson = s
							.getProperty(SportsOntology.PERSONNAME).getString();
					found = found && ontoPerson.equalsIgnoreCase(personName);
				}
				if (found && (afterDate != null || beforeDate != null)) {
					String startDate = s.getSubject()
							.getProperty(SportsOntology.DATE).getString();
					Date date;
					try {
						date = decodeDate(startDate);
					} catch (ParseException e) {
						e.printStackTrace();
						// an unparseable document date can never satisfy a date filter
						return false;
					}
					// (redundant inner null re-checks removed; branches are guarded)
					if (afterDate != null) {
						found = found && date.compareTo(afterDate) >= 0;
					}
					if (beforeDate != null) {
						found = found && date.compareTo(beforeDate) <= 0;
					}
				}
				return found;
			}
		});
		while (iter.hasNext()) {
			Statement stmn = iter.nextStatement();
			String quote = stmn.getProperty(SportsOntology.QUOTEDTEXT)
					.getString();
			String person = stmn.getProperty(SportsOntology.PERSONNAME)
					.getString();
			PersonQuotes personQuote = new PersonQuotes();
			personQuote.addQuote(quote);
			personQuote.setPerson(person);
			result.add(personQuote);
		}
		return result;
	}

	/** Collects all quotes belonging to the document registered under {@code docURI}. */
	public OntologyResult query(final String docURI) {
		OntologyResult result = new OntologyResult();
		StmtIterator iter = model.listStatements(new SimpleSelector(null,
				SportsOntology.QUOTE, (RDFNode) null) {
			@Override
			public boolean selects(Statement s) {
				Statement docUrl = s.getSubject().getProperty(
						SportsOntology.DOCUMENT);
				return (docUrl != null)
						&& docUrl.getString().equalsIgnoreCase(docURI);
			}
		});
		while (iter.hasNext()) {
			Statement stmn = iter.nextStatement();
			String quote = stmn.getProperty(SportsOntology.QUOTEDTEXT)
					.getString();
			String person = stmn.getProperty(SportsOntology.PERSONNAME)
					.getString();
			PersonQuotes personQuote = new PersonQuotes();
			personQuote.addQuote(quote);
			personQuote.setPerson(person);
			result.addQuote(personQuote);
		}
		return result;
	}

	/** Dumps the model to stdout (debug helper). */
	public void print() {
		model.write(System.out);
	}

	/**
	 * Serializes the model to the named file.
	 *
	 * @throws FileNotFoundException when the file cannot be created
	 */
	public void save(String outputFileName) throws FileNotFoundException {
		OutputStream out = new FileOutputStream(outputFileName);
		// FIX: the stream was previously never closed (resource leak).
		try {
			model.write(out);
		} finally {
			try {
				out.close();
			} catch (IOException ignored) {
				// best-effort close; the data has already been written
			}
		}
	}
}
package org.xtx.ut4converter.ucore; import java.io.File; import java.util.HashSet; import java.util.Set; import org.xtx.ut4converter.UTGames; import org.xtx.ut4converter.t3d.T3DRessource.Type; /** * Very basic implementation of unreal package * @author XtremeXp */ public class UPackage { /** * UT game this package comes from */ private UTGames.UTGame game; /** * Name of package */ String name; /** * File of package */ File file; /** * Package ressources (textures, staticmeshes, ...) */ Set<UPackageRessource> ressources = new HashSet<>(); /** * Type of package (level, sound, textures, ...) * TODO remove some package may not contain only one type of ressource * (e.g: map packages) */ public Type type; /** * * @param name Package Name * @param type Type of package (sounds, textures, ...) * @param game UT game this package belong to * @param uRessource */ public UPackage(String name, Type type, UTGames.UTGame game, UPackageRessource uRessource){ this.name = name; this.type = type; this.game = game; ressources.add(uRessource); } public String getName() { return name; } /** * Gets the associated file with this package. 
* @param gamePath Base path of the ut game this unreal package comes from * @return */ public File getFileContainer(File gamePath){ if(this.file != null){ return this.file; } // refactor this if(type == Type.LEVEL){ this.file = new File(name); } else { this.file = new File(gamePath.getAbsolutePath() + File.separator + getFileFolder() + File.separator + getName() + getFileExtension()); } return this.file; } public void setFile(File file) { this.file = file; } public void addRessource(UPackageRessource ressource){ ressources.add(ressource); } /** * List all ressources of packages that have been exported * @return List of exported ressources */ public Set<File> getExportedFiles() { Set<File> exportedFiles = new HashSet<>(); for(UPackageRessource upr : ressources){ if(upr.getExportedFile() != null){ exportedFiles.add(upr.getExportedFile()); } } return exportedFiles; } /** * Returns ressource package by full name * @param fullName Full ressource name (e.g: "AmbAncient.Looping.Stower51") * @return ressource with same full name */ public UPackageRessource findRessource(String fullName){ for(UPackageRessource packageRessource : ressources){ if(fullName.equals(packageRessource.getFullName())){ return packageRessource; } } return null; } /** * Get ressources used by the package. 
* The ressource list is built on extracting ressource packages * with unreal package extractor * @return List of ressources of the package */ public Set<UPackageRessource> getRessources() { return ressources; } /** * Return path where unreal packages are stored depending * on type of ressource * @return Relative folder from UT path where the unreal package file should be */ private String getFileFolder(){ if(type == Type.MUSIC){ return "Music"; } else if (type == Type.SOUND){ return "Sounds"; } else if (type == Type.TEXTURE){ return "Textures"; } else if (type == Type.STATICMESH){ return "StaticMeshes"; } else if (type == Type.LEVEL){ return "Maps"; } else if (type == Type.SCRIPT){ return "System"; } return null; } /** * Return relative path * @return */ private String getFileExtension(){ if(type == Type.MUSIC){ return ".umx"; } else if (type == Type.SOUND){ return ".uax"; } else if (type == Type.TEXTURE){ return ".utx"; } else if (type == Type.STATICMESH){ return ".usx"; } else if (type == Type.SCRIPT){ return ".u"; } else if (type == Type.LEVEL){ return ".unr"; } return null; } }
package org.zendesk.client.v2.model; import com.fasterxml.jackson.annotation.JsonProperty; import java.io.Serializable; import java.util.Arrays; import java.util.Date; import java.util.List; /** * @author stephenc * @since 09/04/2013 15:09 */ public class Comment implements Serializable { private static final long serialVersionUID = 1L; private Long id; private String body; private String htmlBody; private Long authorId; private List<String> uploads; private List<Attachment> attachments; private Date createdAt; private Boolean publicComment; public Comment() { } public Comment(String body) { this.body = body; } public Comment(String body, String... uploads) { this.body = body; this.uploads = uploads.length == 0 ? null : Arrays.asList(uploads); } public String getBody() { return body; } public void setBody(String body) { this.body = body; } @JsonProperty("html_body") public String getHtmlBody() { return htmlBody; } public void setHtmlBody(String htmlBody) { this.htmlBody = htmlBody; } public List<String> getUploads() { return uploads; } public void setUploads(List<String> uploads) { this.uploads = uploads; } public List<Attachment> getAttachments() { return attachments; } public void setAttachments(List<Attachment> attachments) { this.attachments = attachments; } @JsonProperty("author_id") public Long getAuthorId() { return authorId; } public void setAuthorId(Long authorId) { this.authorId = authorId; } @JsonProperty("created_at") public Date getCreatedAt() { return createdAt; } public void setCreatedAt(Date createdAt) { this.createdAt = createdAt; } public Long getId() { return id; } public void setId(Long id) { this.id = id; } @JsonProperty("public") public Boolean isPublic() { return publicComment; } public void setPublic(Boolean isPublic) { this.publicComment = isPublic; } @Override public String toString() { final StringBuilder sb = new StringBuilder("Comment{"); sb.append("id=").append(id); sb.append(", body='").append(body).append('\''); sb.append(", 
authorId=").append(authorId); sb.append(", attachments=").append(attachments); sb.append(", createdAt=").append(createdAt); sb.append(", uploads=").append(uploads); sb.append('}'); return sb.toString(); } }
package org.zeropage.basic; import org.zeropage.LinkSource; import org.zeropage.PathFinder; import org.zeropage.log.Logger; import org.zeropage.Path; import javax.validation.constraints.NotNull; import java.util.*; /** * Simple implementation of PathFinder interface * Designed with only essential functionalities */ public class SimplePathFinder implements PathFinder { private LinkSource source; private Logger logger; /** * * @param source linksource where path data should be retrieved from */ public SimplePathFinder(@NotNull LinkSource source) { this.source = source; logger = Logger.getInstance(); } /** * * @param from Node where to start the searching * @param to Target node where to end the searching * @return Path describes the shortest path btw from - to * @throws Exception When problem occurs while finding the path(ex.network problem) */ @Override public Path getPath(String from, String to) throws Exception { Map<String, String> parents = new HashMap<>(); Queue<String> currentQueue = new LinkedList<>(); Queue<String> nextQueue; int step = 1; boolean pathFound = false; currentQueue.add(from); while (!pathFound && !currentQueue.isEmpty()) { logStep(currentQueue, step); nextQueue = new LinkedList<>(); while (!pathFound && !currentQueue.isEmpty()) { String currentNode = currentQueue.poll(); logCurrentNode(step, currentNode); Set<String> connectedLinks = source.getLinks(currentNode); if (connectedLinks == null) { continue; } for (String connectedNode : connectedLinks) { if (!parents.containsKey(connectedNode)) { parents.put(connectedNode, currentNode); nextQueue.add(connectedNode); } if (to.equals(connectedNode)) { pathFound = true; break; } } } currentQueue = nextQueue; step++; } if (pathFound) { return new ConcretePath(getResult(parents, from, to)); } else { return null; } } private void logCurrentNode(int step, String currentNode) { logger.debug(String.format("Step %d, %s", step, currentNode)); } private void logStep(Queue<String> currentQueue, int step) { 
logger.info(String.format("Step %d, Queue size: %d", step, currentQueue.size())); } private ArrayList<String> getResult(Map<String, String> parents, String from, String to) { ArrayList<String> path = new ArrayList<>(); String current = to; while (!current.equals(from)) { path.add(current); current = parents.get(current); } path.add(current); Collections.reverse(path); return path; } }
package othlon.cherrypig.blocks; import cpw.mods.fml.relauncher.Side; import cpw.mods.fml.relauncher.SideOnly; import net.minecraft.block.BlockLog; import net.minecraft.block.BlockWood; import net.minecraft.client.renderer.texture.IIconRegister; import net.minecraft.creativetab.CreativeTabs; import net.minecraft.item.Item; import net.minecraft.item.ItemStack; import net.minecraft.util.IIcon; import net.minecraft.world.World; import othlon.cherrypig.CherryPig; import java.util.List; public class CPCherryLog extends BlockLog { @SideOnly(Side.CLIENT) IIcon logIcon; IIcon logIconTopBtm; public CPCherryLog() { this.setHardness(2.0F); this.setHarvestLevel("axe", 0); this.setResistance(5.0F); this.setStepSound(soundTypeWood); this.setBlockName("CherryPig Wood"); this.setCreativeTab(CherryPig.tabCherryPig); } @Override public void getSubBlocks(Item p_149666_1_, CreativeTabs p_149666_2_, List p_149666_3_) { //noinspection unchecked p_149666_3_.add(new ItemStack(p_149666_1_, 1, 0)); } //nope @Override public void registerBlockIcons(IIconRegister thisdude) { logIcon = thisdude.registerIcon("cherrypig:cherrypig_treebark"); logIconTopBtm = thisdude.registerIcon("cherrypig:cherrypig_treelog"); } @Override @SideOnly(Side.CLIENT) protected IIcon getSideIcon(int meta) { return this.logIcon; } @Override @SideOnly(Side.CLIENT) protected IIcon getTopIcon(int meta) { return this.logIconTopBtm; } /*@Override public int onBlockPlaced( World world, int x, int y, int z, int side, float hitx, float hity, float hitz, int meta) { byte orientation = 0; switch(side) { case 0: case 1: orientation = 0; break; case 2: case 3: orientation = 8; break; case 4: case 5: orientation = 4; } return orientation; }*/ }
package prm4j.indexing.realtime;

import java.lang.ref.ReferenceQueue;

import prm4j.Globals;
import prm4j.api.ParametricMonitor;
import prm4j.indexing.staticdata.NodeFactory;

/**
 * Coordinates the garbage collection of monitors which can never reach an accepting state. Provides some diagnostics
 * regarding node creation.
 */
public class NodeManager {

    /**
     * Interval for polling the reference queue for {@link NodeRef}s after garbage-collection of {@link Node}s. The
     * interval is measured in number of events processed by the {@link ParametricMonitor} (aka 'timestamp'). Larger
     * numbers (e.g. 100000) have proven sufficient, since {@link NodeRef}s containing the orphaned monitors usually get
     * garbage collected themselves quite quickly.
     */
    private final static int CLEANING_INTERVAL = Globals.MONITOR_CLEANING_INTERVAL;

    /**
     * In cases, where Globals.MONITOR_CLEANING_INTERVAL == Globals.BINDING_CLEANING_INTERVAL it is not advised to
     * perform them at the same point in time. The shift prevents this effect, as it moves the point of cleaning half
     * the interval into the future.
     */
    private final static int CLEANING_INTERVAL_SHIFT = CLEANING_INTERVAL / 2;

    /**
     * The number of created nodes by each {@link NodeFactory}.
     */
    private long createdNodeCount;

    /**
     * The number of monitors for which their associated {@link Node} has been garbage collected.
     */
    private long orphanedMonitors;

    /**
     * The number of orphaned monitors that could never reach an accepting state and got garbage collected.
     */
    private long collectedMonitors;

    /**
     * Contains {@link NodeRef}s.
     */
    private final ReferenceQueue<Node> referenceQueue;

    public NodeManager() {
        referenceQueue = new ReferenceQueue<Node>();
    }

    /**
     * Calls {@link #reallyClean()} each time the cleaning interval is reached.
     *
     * @param timestamp number of events processed so far
     */
    public void tryToClean(long timestamp) {
        if (timestamp % CLEANING_INTERVAL == CLEANING_INTERVAL_SHIFT) {
            reallyClean();
        }
    }

    /**
     * Polls all expired {@link NodeRef}s and nullifies all monitors which can never reach an accepting state.
     */
    public void reallyClean() {
        NodeRef nodeRef = (NodeRef) referenceQueue.poll();
        while (nodeRef != null) {
            orphanedMonitors++;
            if (nodeRef.monitor != null && !nodeRef.monitor.isAcceptingStateReachable()) {
                nodeRef.monitor = null;
                collectedMonitors++;
            }
            nodeRef = (NodeRef) referenceQueue.poll();
        }
    }

    /**
     * DIAGNOSTIC: Called by each {@link NodeFactory} each time a node has been created.
     *
     * @param node the freshly created node (only the count is recorded)
     */
    public void createdNode(Node node) {
        createdNodeCount++;
    }

    /**
     * DIAGNOSTIC: Returns the number of created nodes by each {@link NodeFactory}.
     *
     * @return the number of created nodes
     */
    public long getCreatedCount() {
        return createdNodeCount;
    }

    public ReferenceQueue<Node> getReferenceQueue() {
        return referenceQueue;
    }

    /**
     * DIAGNOSTIC: Returns the number of monitors for which their associated {@link Node} has been garbage collected.
     *
     * @return the number of orphaned monitors
     */
    public long getOrphanedMonitorsCount() {
        return orphanedMonitors;
    }

    /**
     * DIAGNOSTIC: Returns the number of orphaned monitors that could never reach an accepting state and got garbage
     * collected.
     *
     * @return the number of garbage collected monitors
     */
    public long getCollectedMonitorsCount() {
        return collectedMonitors;
    }

    /**
     * Resets all internal diagnostic counters.
     * <p>
     * Fix: the previous version assigned {@code createdNodeCount = 0L} twice (copy-paste duplicate); each counter is
     * now reset exactly once. The queue is drained before zeroing (so stale references do not leak into the fresh
     * counters) and once more afterwards, preserving the original call sequence.
     */
    public void reset() {
        reallyClean();
        createdNodeCount = 0L;
        collectedMonitors = 0L;
        orphanedMonitors = 0L;
        reallyClean();
    }
}
package seedu.address.model.person;

import java.util.Date;
import java.text.ParseException;
import java.text.SimpleDateFormat;

import seedu.address.commons.exceptions.IllegalValueException;

/**
 * Represents a task's date in the format dd-mm-yyyy.
 * Guarantees: when constructed from a string, the string matches
 * {@link #TASKDATE_VALIDATION_REGEX} and denotes a real calendar date.
 */
public class TaskDate {

    public static final String MESSAGE_DATE_CONSTRAINTS = "Task date should only follow this format dd-mm-yyyy";
    public static final String TASKDATE_VALIDATION_REGEX = "(0?[1-9]|[12][0-9]|3[01])-(0?[1-9]|1[012])-((19|20)\\d\\d)";

    // Parsed date; null when constructed via the no-arg constructor.
    public Date value;
    // Trimmed user input; "" when constructed via the no-arg constructor.
    public String dateString;

    public TaskDate() {
        this.dateString = "";
    }

    /**
     * Validates and parses the given date string.
     *
     * @param dateString date in dd-mm-yyyy format; surrounding whitespace is ignored
     * @throws IllegalValueException if the string does not match the dd-mm-yyyy pattern
     * @throws ParseException if the string matches the pattern but is not a real
     *         calendar date (e.g. 31-02-2000)
     */
    public TaskDate(String dateString) throws IllegalValueException, ParseException {
        assert dateString != null;
        // Fix: validate and parse the same trimmed string the object stores, so
        // " 01-01-2000 " is handled consistently instead of failing validation.
        String trimmed = dateString.trim();
        this.dateString = trimmed;
        if (!isValidTaskDate(trimmed)) {
            throw new IllegalValueException(MESSAGE_DATE_CONSTRAINTS);
        }
        SimpleDateFormat simpleDateFormat = new SimpleDateFormat("dd-MM-yyyy");
        // Fix: non-lenient parsing rejects impossible dates such as 31-02-2000
        // instead of silently rolling them over to 02-03-2000.
        simpleDateFormat.setLenient(false);
        this.value = simpleDateFormat.parse(trimmed);
    }

    /**
     * Returns true if a given string is a valid date.
     */
    public static boolean isValidTaskDate(String test) {
        return test.matches(TASKDATE_VALIDATION_REGEX);
    }

    public Date getValue() {
        return value;
    }

    @Override
    public String toString() {
        return dateString;
    }

    @Override
    public boolean equals(Object other) {
        if (other == this) { // short circuit if same object
            return true;
        }
        if (!(other instanceof TaskDate)) { // instance of handles nulls
            return false;
        }
        // Fix: null-safe state check — a TaskDate built with the no-arg
        // constructor has value == null and previously caused an NPE here.
        TaskDate otherDate = (TaskDate) other;
        return value == null ? otherDate.value == null : value.equals(otherDate.value);
    }

    @Override
    public int hashCode() {
        // Fix: null-safe, consistent with equals.
        return value == null ? 0 : value.hashCode();
    }
}
package org.apache.james.userrepository;

import org.apache.avalon.blocks.*;
import org.apache.avalon.*;
import org.apache.avalon.utils.*;
import java.util.*;
import java.io.*;
import com.workingdogs.town.*;

/**
 * Implementation of a Repository to store users in database.
 *
 * SECURITY NOTE(review): every query in this class builds its WHERE clause by
 * string concatenation of the user name; a name containing a quote breaks the
 * query (SQL injection). The Town TableDataSet API used here predates
 * parameterized statements — callers must sanitize user names, or this class
 * should be migrated to an API supporting bound parameters.
 *
 * @version 1.0.0, 10/01/2000
 * @author Ivan Seskar, Upside Technologies <seskar@winlab.rutgers.edu>
 */
public class UsersTownRepository implements UsersRepository, Configurable {

    private String name;
    private String type;
    private String model;
    private String destination;
    private String prefix;      // protocol part of the destination, e.g. "xx//"
    private String repositoryName;
    private String conndefinition;
    private String tableName;

    // System defined logger funtion
    private ComponentManager comp;
    private Logger logger;

    // Constructor - empty
    public UsersTownRepository() {
    }

    // Methods from interface Repository

    /**
     * Stores the repository coordinates and splits the destination into the
     * protocol prefix and the repository name.
     * Fix: the original line contained a corrupted/unterminated string literal;
     * reconstructed as indexOf("//") to match the slash + 2 offsets used below.
     * NOTE(review): confirm the separator against the original destination format.
     */
    public void setAttributes(String name, String destination, String type, String model) {
        this.name = name;
        this.model = model;
        this.type = type;
        this.destination = destination;
        int slash = destination.indexOf("//");
        prefix = destination.substring(0, slash + 2);
        repositoryName = destination.substring(slash + 2);
    }

    public void setComponentManager(ComponentManager comp) {
        this.comp = comp;
        // Store logger
        this.logger = (Logger) comp.getComponent(Interfaces.LOGGER);
    }

    public void setConfiguration(Configuration conf) {
        conndefinition = conf.getConfiguration("conn").getValue();
        tableName = conf.getConfiguration("table").getValue("Users");
    }

    public String getName() {
        return name;
    }

    public String getType() {
        return type;
    }

    public String getModel() {
        return model;
    }

    public String getChildDestination(String childName) {
        return prefix + repositoryName + "/" + childName;
    }

    /**
     * Adds a user with the given password (attributes.toString()); rejects the
     * add with a warning when the user already exists.
     */
    public synchronized void addUser(String strUserName, Object attributes) {
        try {
            TableDataSet MRUser = new TableDataSet(ConnDefinition.getInstance(conndefinition), tableName);
            MRUser.setWhere("UserName = '" + strUserName + "'");
            Record user = null;
            if (MRUser.size() == 0) {
                // Add new user
                user = MRUser.addRecord();
                user.setValue("username", strUserName);
                user.setValue("password", attributes.toString());
                user.save();
            } else {
                // User already exists: reject add
                logger.log("User "+strUserName+" already exists.", "UserManager", logger.WARNING); // old Avalon logger format
            }
        } catch (Exception e) {
            e.printStackTrace();
            throw new RuntimeException("Exception caught while storing user: " + e);
        }
    }

    /**
     * Returns the stored password for the user, or null when unknown.
     * Fix: the original was missing the 'new' keyword on the TableDataSet
     * construction, which did not compile.
     */
    public synchronized Object getAttributes(String strUserName) {
        try {
            TableDataSet MRUser = new TableDataSet(ConnDefinition.getInstance(conndefinition), tableName);
            MRUser.setWhere("UserName = '" + strUserName + "'");
            if (MRUser.size() == 0) {
                logger.log("User "+strUserName+" could not be found while fetching password.", "UserManager", logger.WARNING);
                return (null);
            } else {
                Record user = MRUser.getRecord(0);
                return ((Object) user.getAsString("Password"));
            }
        } catch (Exception e) {
            throw new RuntimeException("Exception while retrieving password: " + e.getMessage());
        }
    }

    /**
     * Deletes the user's record; logs a warning when the user does not exist.
     */
    public synchronized void removeUser(String strUserName) {
        try {
            TableDataSet MRUser = new TableDataSet(ConnDefinition.getInstance(conndefinition), tableName);
            MRUser.setWhere("UserName = '" + strUserName + "'");
            if (MRUser.size() == 0) {
                // User doesn't exist: reject delete
                logger.log("User: " + strUserName + " does not exist. Cannot delete", "UserManager", logger.WARNING);
            } else {
                Record user = MRUser.getRecord(0);
                user.markToBeDeleted();
                user.save();
            }
        } catch (Exception e) {
            throw new RuntimeException("Exception while deleting user: " + e.getMessage());
        }
    }

    /** Returns true when a record for the user exists. */
    public boolean contains(String strUserName) {
        try {
            TableDataSet MRUser = new TableDataSet(ConnDefinition.getInstance(conndefinition), tableName);
            MRUser.setWhere("UserName = '" + strUserName + "'");
            if (MRUser.size() > 0) {
                return true;  // User exists
            } else {
                return false; // User does not exist
            }
        } catch (Exception e) {
            throw new RuntimeException("Exception while retrieving user: " + e.getMessage());
        }
    }

    /**
     * Returns true when the user exists and the stored password equals
     * attributes.toString().
     */
    public boolean test(String strUserName, Object attributes) {
        try {
            TableDataSet MRUser = new TableDataSet(ConnDefinition.getInstance(conndefinition), tableName);
            MRUser.setWhere("UserName = '" + strUserName + "'");
            if (MRUser.size() > 0) {
                // UserName exists - check if the password is OK
                Record user = MRUser.getRecord(0);
                return (user.getAsString("Password").equals(attributes.toString()));
            } else {
                // UserName does not exist
                logger.log("User "+strUserName+" doesn't exist", "UserManager", logger.WARNING);
                return (false);
            }
        } catch (Exception e) {
            e.printStackTrace();
            throw new RuntimeException("Exception caught while testing UserName: " + e.getMessage());
        }
    }

    /**
     * Returns the number of user records.
     * NOTE(review): "UserName = *" is not standard SQL wildcard syntax — this
     * where-clause may match nothing; a plain unfiltered count (or LIKE '%')
     * looks intended. Left as-is to preserve behavior; verify against the
     * Town library's where-clause semantics.
     */
    public int countUsers() {
        try {
            TableDataSet MRUser = new TableDataSet(ConnDefinition.getInstance(conndefinition), tableName);
            MRUser.setWhere("UserName = *");
            int nSize = MRUser.size();
            return (int) nSize;
        } catch (Exception e) {
            e.printStackTrace();
            throw new RuntimeException("Exception caught while testing UserName: " + e.getMessage());
        }
    }

    /** Returns an enumeration of all user names. */
    public Enumeration list() {
        Vector list = new Vector();
        try {
            QueryDataSet users = new QueryDataSet(ConnDefinition.getInstance(conndefinition), "SELECT * FROM " + tableName);
            for (int i = 0; i < users.size(); i++) {
                list.add(users.getRecord(i).getAsString("UserName"));
            }
        } catch (Exception e) {
            logger.log("Problem listing mailboxes. " + e, "UserManager", logger.ERROR);
            e.printStackTrace();
            throw new RuntimeException("Exception while listing users: " + e.getMessage());
        }
        return list.elements();
    }
}
package org.ensembl.healthcheck.testcase.generic; import java.sql.Connection; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.util.Arrays; import java.util.List; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.ensembl.healthcheck.AssemblyNameInfo; import org.ensembl.healthcheck.DatabaseRegistryEntry; import org.ensembl.healthcheck.DatabaseType; import org.ensembl.healthcheck.ReportManager; import org.ensembl.healthcheck.Species; import org.ensembl.healthcheck.Team; import org.ensembl.healthcheck.testcase.SingleDatabaseTestCase; import org.ensembl.healthcheck.util.DBUtils; import org.ensembl.healthcheck.util.SqlTemplate; import org.ensembl.healthcheck.util.Utils; /** * Checks that meta_value contents in the meta table are OK. Only one meta table at a time is done here; checks for the consistency of the * meta table across species are done in MetaCrossSpecies. */ public class MetaValues extends SingleDatabaseTestCase { private boolean isSangerVega = false; public MetaValues() { addToGroup("post_genebuild"); addToGroup("release"); addToGroup("compara-ancestral"); addToGroup("pre-compara-handover"); addToGroup("post-compara-handover"); setTeamResponsible(Team.RELEASE_COORDINATOR); setSecondTeamResponsible(Team.GENEBUILD); setDescription("Check that meta_value contents in the meta table are OK"); } /** * Checks that meta_value contents in the meta table are OK. * * @param dbre * The database to check. * @return True if the test passed. 
*/ public boolean run(final DatabaseRegistryEntry dbre) { isSangerVega = dbre.getType() == DatabaseType.SANGER_VEGA; boolean result = true; Connection con = dbre.getConnection(); Species species = dbre.getSpecies(); if (species == Species.ANCESTRAL_SEQUENCES) { // The rest of the tests are not relevant for the ancestral sequences DB return result; } if (!isSangerVega) {// do not check for sangervega result &= checkOverlappingRegions(con); } result &= checkAssemblyMapping(con); result &= checkTaxonomyID(dbre); result &= checkAssemblyWeb(dbre); if (dbre.getType() == DatabaseType.CORE) { result &= checkDates(dbre); result &= checkGenebuildID(con); } result &= checkCoordSystemTableCases(con); result &= checkBuildLevel(dbre); result &= checkSample(dbre); //Use an AssemblyNameInfo object to get the assembly information AssemblyNameInfo assembly = new AssemblyNameInfo(con); String metaTableAssemblyDefault = assembly.getMetaTableAssemblyDefault(); logger.finest("assembly.default from meta table: " + metaTableAssemblyDefault); String dbNameAssemblyVersion = assembly.getDBNameAssemblyVersion(); logger.finest("Assembly version from DB name: " + dbNameAssemblyVersion); String metaTableAssemblyVersion = assembly.getMetaTableAssemblyVersion(); logger.finest("meta table assembly version: " + metaTableAssemblyVersion); String metaTableAssemblyPrefix = assembly.getMetaTableAssemblyPrefix(); logger.finest("meta table assembly prefix: " + metaTableAssemblyPrefix); if (metaTableAssemblyVersion == null || metaTableAssemblyDefault == null || metaTableAssemblyPrefix == null || dbNameAssemblyVersion == null) { ReportManager.problem(this, con, "Cannot get all information from meta table - check for null values"); } else { // Check that assembly prefix is valid and corresponds to this species // Prefix is OK as long as it starts with the valid one Species dbSpecies = dbre.getSpecies(); String correctPrefix = Species.getAssemblyPrefixForSpecies(dbSpecies); if (!isSangerVega) {// do not check 
this for sangervega if (correctPrefix == null) { logger.info("Can't get correct assembly prefix for " + dbSpecies.toString()); } else { if (!metaTableAssemblyPrefix.toUpperCase().startsWith(correctPrefix.toUpperCase())) { ReportManager.problem(this, con, "Database species is " + dbSpecies + " but assembly prefix " + metaTableAssemblyPrefix + " should have prefix beginning with " + correctPrefix); result = false; } } } } result &= checkGenebuildMethod(dbre); result &= checkAssemblyAccessionUpdate(dbre); result &= checkRepeatAnalysis(dbre); result &= checkForSchemaPatchLineBreaks(dbre); return result; } // run // this HC will check the Meta table contains the assembly.overlapping_regions and // that it is set to false (so no overlapping regions in the genome) private boolean checkOverlappingRegions(Connection con) { boolean result = true; // check that certain keys exist String[] metaKeys = { "assembly.overlapping_regions" }; for (int i = 0; i < metaKeys.length; i++) { String metaKey = metaKeys[i]; int rows = DBUtils.getRowCount(con, "SELECT COUNT(*) FROM meta WHERE meta_key='" + metaKey + "'"); if (rows == 0) { result = false; ReportManager.problem(this, con, "No entry in meta table for " + metaKey + ". It might need to run the misc-scripts/overlapping_regions.pl script"); } else { String[] metaValue = DBUtils.getColumnValues(con, "SELECT meta_value FROM meta WHERE meta_key='" + metaKey + "'"); if (metaValue[0].equals("1")) { // there are overlapping regions !! API might behave oddly ReportManager.problem(this, con, "There are overlapping regions in the database (e.g. two versions of the same chromosomes). 
The API" + " might have unexpected results when trying to map features to that coordinate system."); result = false; } } } return result; } private boolean checkAssemblyMapping(Connection con) { boolean result = true; // Check formatting of assembly.mapping entries; should be of format // coord_system1{:default}|coord_system2{:default} with optional third // coordinate system // and all coord systems should be valid from coord_system // can also have # instead of | as used in unfinished contigs etc Pattern assemblyMappingPattern = Pattern.compile("^([a-zA-Z0-9.]+):?([a-zA-Z0-9._]+)?[\\|#]([a-zA-Z0-9._]+):?([a-zA-Z0-9._]+)?([\\|#]([a-zA-Z0-9.]+):?([a-zA-Z0-9._]+)?)?$"); String[] validCoordSystems = DBUtils.getColumnValues(con, "SELECT name FROM coord_system"); String[] mappings = DBUtils.getColumnValues(con, "SELECT meta_value FROM meta WHERE meta_key='assembly.mapping'"); for (int i = 0; i < mappings.length; i++) { Matcher matcher = assemblyMappingPattern.matcher(mappings[i]); if (!matcher.matches()) { result = false; ReportManager.problem(this, con, "Coordinate system mapping " + mappings[i] + " is not in the correct format"); } else { // if format is OK, check coord systems are valid boolean valid = true; String cs1 = matcher.group(1); String assembly1 = matcher.group(2); String cs2 = matcher.group(3); String assembly2 = matcher.group(4); String cs3 = matcher.group(6); String assembly3 = matcher.group(7); if (!Utils.stringInArray(cs1, validCoordSystems, false)) { valid = false; ReportManager.problem(this, con, "Source co-ordinate system " + cs1 + " is not in the coord_system table"); } if (!Utils.stringInArray(cs2, validCoordSystems, false)) { valid = false; ReportManager.problem(this, con, "Target co-ordinate system " + cs2 + " is not in the coord_system table"); } // third coordinate system is optional if (cs3 != null && !Utils.stringInArray(cs3, validCoordSystems, false)) { valid = false; ReportManager.problem(this, con, "Third co-ordinate system in mapping (" 
+ cs3 + ") is not in the coord_system table"); } if (valid) { ReportManager.correct(this, con, "Coordinate system mapping " + mappings[i] + " is OK"); } result &= valid; // check that coord_system:version pairs listed here exist in the coord_system table result &= checkCoordSystemVersionPairs(con, cs1, assembly1, cs2, assembly2, cs3, assembly3); // check that coord systems are specified in lower-case result &= checkCoordSystemCase(con, cs1, "meta assembly.mapping"); result &= checkCoordSystemCase(con, cs2, "meta assembly.mapping"); result &= checkCoordSystemCase(con, cs3, "meta assembly.mapping"); } } return result; } /** * Check that coordinate system:assembly pairs in assembly.mappings match what's in the coord system table */ private boolean checkCoordSystemVersionPairs(Connection con, String cs1, String assembly1, String cs2, String assembly2, String cs3, String assembly3) { boolean result = true; List<String> coordSystemsAndVersions = DBUtils.getColumnValuesList(con, "SELECT CONCAT_WS(':',name,version) FROM coord_system"); result &= checkCoordSystemPairInList(con, cs1, assembly1, coordSystemsAndVersions); result &= checkCoordSystemPairInList(con, cs2, assembly2, coordSystemsAndVersions); if (cs3 != null) { result &= checkCoordSystemPairInList(con, cs3, assembly3, coordSystemsAndVersions); } return result; } /** * Check if a particular coordinate system:version pair is in a list. Deal with nulls appropriately. */ private boolean checkCoordSystemPairInList(Connection con, String cs, String assembly, List<String> coordSystems) { boolean result = true; String toCompare = (assembly != null) ? cs + ":" + assembly : cs; if (!coordSystems.contains(toCompare)) { ReportManager.problem(this, con, "Coordinate system name/version " + toCompare + " in assembly.mapping does not appear in coord_system table."); result = false; } return result; } /** * @return true if cs is all lower case (or null), false otherwise. 
*/ private boolean checkCoordSystemCase(Connection con, String cs, String desc) { if (cs == null) { return true; } boolean result = true; if (cs.equals(cs.toLowerCase())) { ReportManager.correct(this, con, "Co-ordinate system name " + cs + " all lower case in " + desc); result = true; } else { ReportManager.problem(this, con, "Co-ordinate system name " + cs + " is not all lower case in " + desc); result = false; } return result; } /** * Check that all coord systems in the coord_system table are lower case. */ private boolean checkCoordSystemTableCases(Connection con) { // TODO - table name in report boolean result = true; String[] coordSystems = DBUtils.getColumnValues(con, "SELECT name FROM coord_system"); for (int i = 0; i < coordSystems.length; i++) { result &= checkCoordSystemCase(con, coordSystems[i], "coord_system"); } return result; } private boolean checkTaxonomyID(DatabaseRegistryEntry dbre) { boolean result = true; Connection con = dbre.getConnection(); // Check that the taxonomy ID matches a known one. // The taxonomy ID-species mapping is held in the Species class. Species species = dbre.getSpecies(); String dbTaxonID = DBUtils.getRowColumnValue(con, "SELECT meta_value FROM meta WHERE meta_key='species.taxonomy_id'"); logger.finest("Taxonomy ID from database: " + dbTaxonID); if (dbTaxonID.equals(Species.getTaxonomyID(species))) { ReportManager.correct(this, con, "Taxonomy ID " + dbTaxonID + " is correct for " + species.toString()); } else { result = false; ReportManager.problem(this, con, "Taxonomy ID " + dbTaxonID + " in database is not correct - should be " + Species.getTaxonomyID(species) + " for " + species.toString()); } return result; } private boolean checkAssemblyWeb(DatabaseRegistryEntry dbre) { boolean result = true; Connection con = dbre.getConnection(); // Check that the taxonomy ID matches a known one. // The taxonomy ID-species mapping is held in the Species class. 
String[] allowedTypes = {"GenBank Assembly ID", "EMBL-Bank WGS Master"}; String[] allowedSources = {"NCBI", "ENA", "DDBJ"}; String WebType = DBUtils.getRowColumnValue(con, "SELECT meta_value FROM meta WHERE meta_key='assembly.web_accession_type'"); String WebSource = DBUtils.getRowColumnValue(con, "SELECT meta_value FROM meta WHERE meta_key='assembly.web_accession_source'"); if (WebType.length() > 0) { if (!Utils.stringInArray(WebType, allowedTypes, true)) { result = false; ReportManager.problem(this, con, "Web accession type " + WebType + " is not allowed"); } } if (WebSource.length() > 0) { if (!Utils.stringInArray(WebSource, allowedSources, true)) { result = false; ReportManager.problem(this, con, "Web accession source " + WebSource + " is not allowed"); } } return result; } private boolean checkDates(DatabaseRegistryEntry dbre) { boolean result = true; Connection con = dbre.getConnection(); String[] keys = { "genebuild.start_date", "assembly.date", "genebuild.initial_release_date", "genebuild.last_geneset_update" }; String date = "[0-9]{4}-[0-9]{2}"; String[] regexps = { date + "-[a-zA-Z]*", date, date, date }; for (int i = 0; i < keys.length; i++) { String key = keys[i]; String regexp = regexps[i]; String value = DBUtils.getRowColumnValue(con, "SELECT meta_value FROM meta WHERE meta_key='" + key + "'"); if (value == null || value.length() == 0) { ReportManager.problem(this, con, "No " + key + " entry in meta table"); result = false; } result &= checkMetaKey(con, key, value, regexp); if (result) { result &= checkDateFormat(con, key, value); } if (result) { ReportManager.correct(this, con, key + " is present & in a valid format"); } } // some more checks for sanity of dates int startDate = Integer.valueOf(DBUtils.getRowColumnValue(con, "SELECT meta_value FROM meta WHERE meta_key='genebuild.start_date'").replaceAll("[^0-9]", "")).intValue(); int initialReleaseDate = Integer.valueOf(DBUtils.getRowColumnValue(con, "SELECT meta_value FROM meta WHERE 
meta_key='genebuild.initial_release_date'").replaceAll("[^0-9]", "")).intValue(); int lastGenesetUpdate = Integer.valueOf(DBUtils.getRowColumnValue(con, "SELECT meta_value FROM meta WHERE meta_key='genebuild.last_geneset_update'").replaceAll("[^0-9]", "")).intValue(); // check for genebuild.start_date >= genebuild.initial_release_date (not allowed as we cannot release a gene set before // downloaded the evidence) if (startDate >= initialReleaseDate) { result = false; ReportManager.problem(this, con, "genebuild.start_date is greater than or equal to genebuild.initial_release_date"); } // check for genebuild.initial_release_date > genebuild.last_geneset_update (not allowed as we cannot update a gene set before // its initial public release) if (initialReleaseDate > lastGenesetUpdate) { result = false; ReportManager.problem(this, con, "genebuild.initial_release_date is greater than or equal to genebuild.last_geneset_update"); } // check for current genebuild.last_geneset_update <= previous release genebuild.last_geneset_update // AND the number of genes or transcripts or exons between the two releases has changed // If the gene set has changed in any way since the previous release then the date should have been updated. 
DatabaseRegistryEntry previous = getEquivalentFromSecondaryServer(dbre); if (previous == null) { return result; } Connection previousCon = previous.getConnection(); String previousLastGenesetUpdateString = DBUtils.getRowColumnValue(previousCon, "SELECT meta_value FROM meta WHERE meta_key='genebuild.last_geneset_update'").replaceAll("-", ""); if (previousLastGenesetUpdateString == null || previousLastGenesetUpdateString.length() == 0) { ReportManager.problem(this, con, "Problem parsing last geneset update entry from previous database."); return false; } int previousLastGenesetUpdate; try { previousLastGenesetUpdate = Integer.valueOf(previousLastGenesetUpdateString).intValue(); } catch (NumberFormatException e) { ReportManager.problem(this, con, "Problem parsing last geneset update entry from previous database: " + Arrays.toString(e.getStackTrace())); return false; } if (lastGenesetUpdate <= previousLastGenesetUpdate) { int currentGeneCount = DBUtils.getRowCount(con, "SELECT COUNT(*) FROM gene"); int currentTranscriptCount = DBUtils.getRowCount(con, "SELECT COUNT(*) FROM transcript"); int currentExonCount = DBUtils.getRowCount(con, "SELECT COUNT(*) FROM exon"); int previousGeneCount = DBUtils.getRowCount(previousCon, "SELECT COUNT(*) FROM gene"); int previousTranscriptCount = DBUtils.getRowCount(previousCon, "SELECT COUNT(*) FROM transcript"); int previousExonCount = DBUtils.getRowCount(previousCon, "SELECT COUNT(*) FROM exon"); if (currentGeneCount != previousGeneCount || currentTranscriptCount != previousTranscriptCount || currentExonCount != previousExonCount) { ReportManager.problem(this, con, "Last geneset update entry is the same or older than the equivalent entry in the previous release and the number of genes, transcripts or exons has changed."); result = false; } } return result; } private boolean checkMetaKey(Connection con, String key, String s, String regexp) { if (regexp != null) { if (!s.matches(regexp)) { ReportManager.problem(this, con, key + " " + s 
+ " is not in correct format - should match " + regexp); return false; } } return true; } private boolean checkDateFormat(Connection con, String key, String s) { int year = Integer.parseInt(s.substring(0, 4)); if (year < 2003 || year > 2050) { ReportManager.problem(this, con, "Year part of " + key + " (" + year + ") is incorrect"); return false; } int month = Integer.parseInt(s.substring(5, 7)); if (month < 1 || month > 12) { ReportManager.problem(this, con, "Month part of " + key + " (" + month + ") is incorrect"); return false; } return true; } private boolean checkGenebuildID(Connection con) { String gbid = DBUtils.getRowColumnValue(con, "SELECT meta_value FROM meta WHERE meta_key='genebuild.id'"); logger.finest("genebuild.id from database: " + gbid); if (gbid == null || gbid.length() == 0) { ReportManager.problem(this, con, "No genebuild.id entry in meta table"); return false; } else if (!gbid.matches("[0-9]+")) { ReportManager.problem(this, con, "genebuild.id " + gbid + " is not numeric"); return false; } ReportManager.correct(this, con, "genebuild.id " + gbid + " is present and numeric"); return true; } /** * Check that at least some sort of genebuild.level-type key is present. 
*/ private boolean checkBuildLevel(DatabaseRegistryEntry dbre) { boolean result = true; Connection con = dbre.getConnection(); String[] Tables = { "gene", "transcript", "exon", "repeat_feature", "dna_align_feature", "protein_align_feature", "simple_feature", "prediction_transcript", "prediction_exon" }; int exists = DBUtils.getRowCount(con, "SELECT COUNT(*) FROM meta where meta_key like '%build.level'"); if (exists == 0) { ReportManager.problem(this, con, "GB: No %build.level entries in the meta table - run ensembl/misc-scripts/meta_levels.pl"); } int count = 0; for (int i = 0; i < Tables.length; i++) { String Table = Tables[i]; int rows = DBUtils.getRowCount(con, "SELECT COUNT(*) FROM " + Table); int key = DBUtils.getRowCount(con, "SELECT COUNT(*) FROM meta WHERE meta_key = '" + Table + "build.level' "); int toplevel = DBUtils.getRowCount(con, "SELECT COUNT(*) FROM " + Table + " t, seq_region_attrib sra, attrib_type at WHERE t.seq_region_id = sra.seq_region_id AND sra.attrib_type_id = at.attrib_type_id AND at.code = 'toplevel' "); if (rows != 0) { if (key == 0) { if (rows == toplevel) { ReportManager.problem(this, con, "Table " + Table + " should have a toplevel flag - run ensembl/misc-scripts/meta_levels.pl"); } else { count++; } } else { if (rows != toplevel) { ReportManager.problem(this, con, "Table " + Table + " has some non toplevel regions, should not have a toplevel flag - run ensembl/misc-scripts/meta_levels.pl"); } else { count++; } } } else { if (key != 0) { ReportManager.problem(this, con, "Empty table " + Table + " should not have a toplevel flag - run ensembl/misc-scripts/meta_levels.pl"); } else { count++; } } } if (count == Tables.length) { ReportManager.correct(this, con, "Toplevel flags correctly set"); result = true; } return result; } /** * Check that the genebuild.method entry exists and has one of the allowed values. 
*/ private boolean checkGenebuildMethod(DatabaseRegistryEntry dbre) { boolean result = true; // only valid for core databases if (dbre.getType() != DatabaseType.CORE) { return true; } String[] allowedMethods = { "full_genebuild", "projection_build", "import", "mixed_strategy_build" }; Connection con = dbre.getConnection(); String method = DBUtils.getRowColumnValue(con, "SELECT meta_value FROM meta WHERE meta_key='genebuild.method'"); if (method.equals("")) { ReportManager.problem(this, con, "No genebuild.method entry present in Meta table"); return false; } if (!Utils.stringInArray(method, allowedMethods, true)) { ReportManager.problem(this, con, "genebuild.method value " + method + " is not in list of allowed methods"); result = false; } else { ReportManager.correct(this, con, "genebuild.method " + method + " is valid"); } return result; } private boolean checkAssemblyAccessionUpdate(DatabaseRegistryEntry dbre) { boolean result = true; // only valid for core databases if (dbre.getType() != DatabaseType.CORE) { return true; } Connection con = dbre.getConnection(); String currentAssemblyAccession = DBUtils.getMetaValue(con, "assembly.accession"); String currentAssemblyName = DBUtils.getMetaValue(con, "assembly.name"); if (currentAssemblyAccession.equals("")) { ReportManager.problem(this, con, "No assembly.accession entry present in Meta table"); return false; } if (currentAssemblyName.equals("")) { ReportManager.problem(this, con, "No assembly.name entry present in Meta table"); return false; } DatabaseRegistryEntry sec = getEquivalentFromSecondaryServer(dbre); if (sec == null) { logger.warning("Can't get equivalent database for " + dbre.getName()); return true; } logger.finest("Equivalent database on secondary server is " + sec.getName()); Connection previousCon = sec.getConnection(); String previousAssemblyAccession = DBUtils.getMetaValue(previousCon, "assembly.accession"); String previousAssemblyName = DBUtils.getMetaValue(previousCon, "assembly.name"); long 
currentAssemblyChecksum = DBUtils.getChecksum(con, "assembly"); long previousAssemblyChecksum = DBUtils.getChecksum(previousCon, "assembly"); boolean assemblyChanged = false; boolean assemblyTableChanged = false; boolean assemblyExceptionTableChanged = false; if (currentAssemblyChecksum != previousAssemblyChecksum) { assemblyTableChanged = true; } else { if (dbre.getSpecies() != Species.HOMO_SAPIENS) { // compare assembly_exception tables (patches only) from each database try { Statement previousStmt = previousCon.createStatement(); Statement currentStmt = con.createStatement(); String sql = "SELECT * FROM assembly_exception WHERE exc_type LIKE ('PATCH_%') ORDER BY assembly_exception_id"; ResultSet previousRS = previousStmt.executeQuery(sql); ResultSet currentRS = currentStmt.executeQuery(sql); boolean assExSame = DBUtils.compareResultSets(currentRS, previousRS, this, "", false, false, "assembly_exception", false); currentRS.close(); previousRS.close(); currentStmt.close(); previousStmt.close(); assemblyExceptionTableChanged = !assExSame; } catch (SQLException e) { e.printStackTrace(); } } } assemblyChanged = assemblyTableChanged || assemblyExceptionTableChanged; if (assemblyChanged == previousAssemblyAccession.equals(currentAssemblyAccession) && previousAssemblyName.equals(currentAssemblyName) ) { result = false; String errorMessage = "assembly.accession and assembly.name values need to be updated when " + "the assembly table changes or new patches are added to the assembly exception table\n" + "previous assembly.accession: " + previousAssemblyAccession + " assembly.name: " + previousAssemblyName + " current assembly.accession: " + currentAssemblyAccession + " assembly.name: " + currentAssemblyName + "\n" + "assembly table changed:"; if (assemblyTableChanged) { errorMessage += " yes;"; } else { errorMessage += " no;"; } errorMessage += " assembly exception patches changed:"; if (assemblyExceptionTableChanged) { errorMessage += " yes"; } else { errorMessage += " 
no"; } ReportManager.problem(this, con, errorMessage); } if (result) { ReportManager.correct(this, con, "assembly.accession and assembly.name values are correct"); } return result; } /** * Check that all meta_values with meta_key 'repeat.analysis' reference analysis.logic_name */ private boolean checkRepeatAnalysis(DatabaseRegistryEntry dbre) { boolean result = true; Connection con = dbre.getConnection(); String[] repeatAnalyses = DBUtils.getColumnValues(con, "SELECT meta_value FROM meta LEFT JOIN analysis ON meta_value = logic_name WHERE meta_key = 'repeat.analysis' AND analysis_id IS NULL"); if (repeatAnalyses.length > 0) { ReportManager.problem(this, con, "The following values for meta_key repeat.analysis don't have a corresponding logic_name entry in the analysis table: " + Utils.arrayToString(repeatAnalyses,",") ); } else { ReportManager.correct(this, con, "All values for meta_key repeat.analysis have a corresponding logic_name entry in the analysis table"); } return result; } private boolean checkForSchemaPatchLineBreaks(DatabaseRegistryEntry dbre) { SqlTemplate t = DBUtils.getSqlTemplate(dbre); String metaKey = "patch"; String sql = "select meta_id from meta where meta_key =? 
and species_id IS NULL and meta_value like ?"; List<Integer> ids = t.queryForDefaultObjectList(sql, Integer.class, metaKey, "%\n%"); if(!ids.isEmpty()) { String idsJoined = Utils.listToString(ids, ","); String usefulSql = "select * from meta where meta_id IN ("+idsJoined+")"; String msg = String.format("The meta ids [%s] had values with linebreaks.\nUSEFUL SQL: %s", idsJoined, usefulSql); ReportManager.problem(this, dbre.getConnection(), msg); return false; } return true; } private boolean checkSample(DatabaseRegistryEntry dbre) { SqlTemplate t = DBUtils.getSqlTemplate(dbre); String metaKey = "sample.location_text"; String sql = "select meta_value from meta where meta_key = ?"; List<String> value = t.queryForDefaultObjectList(sql, String.class, metaKey); if (!value.isEmpty()) { String linkedKey = "sample.location_param"; String linkedSql = "select meta_value from meta where meta_key = ?"; List<String> linkedValue = t.queryForDefaultObjectList(linkedSql, String.class, linkedKey); if(!linkedValue.equals(value)) { ReportManager.problem(this, dbre.getConnection(), "Keys " + metaKey + " and " + linkedKey + " do not have same value"); return false; } } return true; } } // MetaValues
package org.ensembl.healthcheck.testcase.generic; import java.sql.Connection; import java.sql.ResultSet; import java.sql.SQLException; import java.util.HashMap; import java.util.Iterator; import java.util.Map; import org.ensembl.healthcheck.DatabaseRegistryEntry; import org.ensembl.healthcheck.DatabaseType; import org.ensembl.healthcheck.ReportManager; import org.ensembl.healthcheck.testcase.SingleDatabaseTestCase; import org.ensembl.healthcheck.util.DBUtils; /** * Check Oligometrix xrefs: - that each chromosome has at least 1 Oligo xref * * Assumptions: oligo xrefs and transcripts are both in the default chromosome coordinate system. * * Note - currently disabled (doesn't apply to any dbs), may be migrated to eFG eventually. * */ public class OligoXrefs extends SingleDatabaseTestCase { // if a database has more than this number of seq_regions in the chromosome coordinate system, it's ignored private static final int MAX_CHROMOSOMES = 75; /** * Creates a new instance of OligoXrefs */ public OligoXrefs() { addToGroup("post_genebuild"); addToGroup("release"); addToGroup("core_xrefs"); setDescription("Check oligo xrefs"); setHintLongRunning(true); } /** * Note - currently disabled. */ public void types() { removeAppliesToType(DatabaseType.CORE); removeAppliesToType(DatabaseType.OTHERFEATURES); removeAppliesToType(DatabaseType.CDNA); removeAppliesToType(DatabaseType.VEGA); } /** * Check all chromosomes have oligo xrefs. * * Get a list of chromosomes, then check the number of Oligo xrefs associated with each one. Fail is any chromosome has 0 oligo * xrefs. * * @param dbre * The database to use. * @return true if the test passed. 
* */ public boolean run(DatabaseRegistryEntry dbre) { boolean result = true; Connection con = dbre.getConnection(); try { // Check if there are any Oligo features - if so there should be Oligo Xrefs if (getRowCount(con, "SELECT COUNT(*) FROM oligo_array") == 0) { logger.info(DBUtils.getShortDatabaseName(con) + " has no Oligo features, not checking for Oligo xrefs"); return true; } // find all chromosomes in default assembly coordinate system Map srID2name = new HashMap(); ResultSet rs = con.createStatement().executeQuery( "SELECT seq_region_id, s.name FROM seq_region s, coord_system c WHERE c.coord_system_id=s.coord_system_id AND c.name='chromosome' and attrib='default_version '"); while (rs.next()) srID2name.put(rs.getString(1), rs.getString(2)); rs.close(); if (srID2name.size() > MAX_CHROMOSOMES) { ReportManager.problem(this, con, "Database has more than " + MAX_CHROMOSOMES + " seq_regions in 'chromosome' coordinate system (actually " + srID2name.size() + ") - test skipped"); return false; } // Count the number of oligo xrefs for each chr Map srID2count = new HashMap(); // (Optimisation: faster to use "in list" of external_db_ids than SQL // join.) StringBuffer inList = new StringBuffer(); String[] exdbIDs = getColumnValues(con, "select external_db_id from external_db where db_name LIKE \'AFFY%\'"); for (int i = 0; i < exdbIDs.length; i++) { if (i > 0) inList.append(","); inList.append(exdbIDs[i]); } rs = con .createStatement() .executeQuery( "select seq_region_id, count(*) as count from transcript t, object_xref ox, xref x where t.transcript_id=ox.ensembl_id and ensembl_object_type='Transcript' and ox.xref_id=x.xref_id and x.external_db_id in (" + inList + ") GROUP BY seq_region_id"); while (rs.next()) srID2count.put(rs.getString("seq_region_id"), rs.getString("count")); rs.close(); // check every chr has >0 oligo xrefs. 
for (Iterator iter = srID2name.keySet().iterator(); iter.hasNext();) { String srID = (String) iter.next(); String name = (String) srID2name.get(srID); String label = name + " (seq_region_id=" + srID + ")"; long count = srID2count.containsKey(srID) ? Long.parseLong(srID2count.get(srID).toString()) : 0; if (count > 0) { ReportManager.correct(this, con, "Chromosome " + label + " has " + srID2count.get(srID) + " associated oligo xrefs."); } else { ReportManager.problem(this, con, "Chromosome " + label + " has no associated oligo xrefs."); result = false; } } } catch (SQLException se) { se.printStackTrace(); result = false; } return result; } // run } // OligoXrefs
package nyc.angus.wordgrid.solver; import java.util.Collections; import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Queue; import java.util.Set; import javax.annotation.Nonnull; import nyc.angus.wordgrid.dictionary.Dictionary; import com.google.common.base.Preconditions; /** * Provides potential solutions for WordBrain problems, provided a grid of characters, and the lengths of the words to * be found in the grid. */ public class WordGridSolver { /** * The dictionary of words that may exist in the grid. */ private final Dictionary dictionary; public WordGridSolver(@Nonnull final Dictionary dictionary) { Preconditions.checkNotNull(dictionary); this.dictionary = dictionary; } /** * Find all valid WordBrain words in the given grid that are the specified length. * * @param caseSensitiveGrid * Word grid. * @param wordLengths * The lengths of the words we are looking for. This is ordered, as some words may only become accessible * once one word is found and removed from the grid. * @return The list of word combinations that complete the grid. The sub-list contains the words that, used * together, complete the grid. */ public List<LinkedList<String>> findWords(final char[][] caseSensitiveGrid, final Queue<Integer> wordLengths) { Preconditions.checkNotNull(caseSensitiveGrid); Preconditions.checkArgument(caseSensitiveGrid.length > 0 || caseSensitiveGrid[0].length > 0); if (wordLengths == null || wordLengths.size() == 0) { return Collections.emptyList(); } final char[][] lowerCaseGrid = Grids.toLowerCase(caseSensitiveGrid); final List<LinkedList<String>> wordsFound = new LinkedList<>(); /* * Start looking for words from each position in the grid. */ for (int y = 0; y < lowerCaseGrid.length; y++) { for (int x = 0; x < lowerCaseGrid[0].length; x++) { // Start with no words seen, and empty string. 
wordsFound.addAll(findWord(lowerCaseGrid, x, y, "", new HashSet<>(), wordLengths)); } } return wordsFound; } /** * Recursively called to build up words. If a word is of the desired length, check if it is in the dictionary. * <p> * Starting with an empty string, it recursively calls out to the (up to) 8 characters next to the given character, * stopping where it has already used a character as part of the word, or at the edge of the grid. * <p> * When it finds a valid word, it moves on in another recursive call to {@link #findWords(char[][], Queue)} to find * another valid word, matching the next word length in <tt>lengthOfWord</tt>. * * @param grid * The word grid. * @param xPos * The current position in the grid, x axis. * @param yPos * The current position in the grid, y axis. * @param currentWord * The word being built up as part of this call. * @param positionsUsedInWord * Set of positions already seen. * @param wordLengthsRequired * The length of the word we are looking for. * * @return The list of word combinations that complete the grid. The sub-list contains the words that, used * together, complete the grid. 
*/ private List<LinkedList<String>> findWord(final char[][] grid, final int xPos, final int yPos, @Nonnull final String currentWord, final Set<Position> positionsUsedInWord, final Queue<Integer> wordLengthsRequired) { // Check terminating conditions (co-ordinates off grid, grid position already used, or grid position empty): if (notAValidPosition(grid, xPos, yPos, positionsUsedInWord)) { return Collections.emptyList(); } final List<LinkedList<String>> solutions = new LinkedList<>(); final String newWord = currentWord + grid[yPos][xPos]; final Integer wordLengthRequired = wordLengthsRequired.peek(); if (newWord.length() == wordLengthRequired && dictionary.isWord(newWord)) { solutions.addAll(markSolutionAndStartNextWord(grid, xPos, yPos, positionsUsedInWord, wordLengthsRequired, newWord)); } else if (newWord.length() < wordLengthRequired && dictionary.isPrefix(newWord)) { solutions.addAll(findNextCharacterInWord(grid, xPos, yPos, positionsUsedInWord, wordLengthsRequired, newWord)); } return solutions; } /** * A grid position is not valid if one of the co-ordinates is off the grid (e.g. a negative co-ordinate), the grid * position has already been used for the current word, or the grid position does not contain a character (in which * case the ' ' char is found). */ private boolean notAValidPosition(final char[][] grid, final int xPos, final int yPos, final Set<Position> positionsUsedInWord) { return xPos < 0 || yPos < 0 || yPos >= grid.length || xPos >= grid[0].length || positionsUsedInWord.contains(new Position(xPos, yPos)) || grid[yPos][xPos] == ' '; } /** * The current word is not large enough, as we haven't reached the desired word size yet. Fan out the search by * recursively calling {@link #findWord(char[][], int, int, String, Set, Queue)}, adding every combination of * remaining characters adjacent to the current character. 
*/ private List<LinkedList<String>> findNextCharacterInWord(final char[][] grid, final int xPos, final int yPos, final Set<Position> positionsUsedInWord, final Queue<Integer> wordLengthsRequired, final String word) { final Set<Position> newPosSeen = new HashSet<>(positionsUsedInWord); newPosSeen.add(new Position(xPos, yPos)); final List<LinkedList<String>> solutions = new LinkedList<>(); solutions.addAll(findWord(grid, xPos - 1, yPos, word, newPosSeen, wordLengthsRequired)); solutions.addAll(findWord(grid, xPos, yPos - 1, word, newPosSeen, wordLengthsRequired)); solutions.addAll(findWord(grid, xPos - 1, yPos - 1, word, newPosSeen, wordLengthsRequired)); solutions.addAll(findWord(grid, xPos + 1, yPos, word, newPosSeen, wordLengthsRequired)); solutions.addAll(findWord(grid, xPos, yPos + 1, word, newPosSeen, wordLengthsRequired)); solutions.addAll(findWord(grid, xPos + 1, yPos + 1, word, newPosSeen, wordLengthsRequired)); solutions.addAll(findWord(grid, xPos + 1, yPos - 1, word, newPosSeen, wordLengthsRequired)); solutions.addAll(findWord(grid, xPos - 1, yPos + 1, word, newPosSeen, wordLengthsRequired)); return solutions; } /** * We have found a valid word. * <p> * If there are no more words to find, add this word to a result list and return it. * <p> * If there are more valid words to find, remove the characters in the discovered word from the grid and recursively * call {@link #findWords(char[][], Queue)} to find the next word. * <p> * If these recursive calls terminate without finding the next required work, this discovered word can be ignored, * as it is not part of a complete solution. */ private List<LinkedList<String>> markSolutionAndStartNextWord(final char[][] grid, final int xPos, final int yPos, final Set<Position> positionsUsedInWord, final Queue<Integer> wordLengthsRequired, final String validWord) { final List<LinkedList<String>> solutions = new LinkedList<>(); /* * We've found a potential first word. Move on to second word. 
*/ final Queue<Integer> newWordLengthsRequired = cloneQueue(wordLengthsRequired); newWordLengthsRequired.remove(); final LinkedList<String> resultSet = new LinkedList<>(); resultSet.add(validWord); Preconditions.checkArgument(validWord.length() == positionsUsedInWord.size() + 1); if (newWordLengthsRequired.isEmpty()) { // No more words to find after this. solutions.add(resultSet); } else { final char[][] updatedGrid = removeWordFromGrid(grid, xPos, yPos, positionsUsedInWord); solutions.addAll(startSearchForNextWord(updatedGrid, validWord, newWordLengthsRequired)); } return solutions; } /** * Start searching for the next word in the grid by removing characters used as part of the first word and * recursively calling {@link #findWords(char[][], Queue)} to begin the search anew. */ private List<LinkedList<String>> startSearchForNextWord(final char[][] grid, final String previouslyDiscoveredWord, final Queue<Integer> newWordLengthsRequired) { final List<LinkedList<String>> solutions = new LinkedList<>(); // Start searching again in new grid for the next word: final List<LinkedList<String>> nextWords = findWords(grid, newWordLengthsRequired); // If more words were found, add the whole result to the solution set: if (!nextWords.isEmpty() && !nextWords.get(0).isEmpty()) { for (final LinkedList<String> list : nextWords) { list.addFirst(previouslyDiscoveredWord); } solutions.addAll(nextWords); } return solutions; } /** * Create a copy of the grid and update it to remove the characters from the recently discovered grid. * <p> * A copy is made of the positions used to make this word, because sibling calls (in the recursive call structure) * to {@link #startSearchForNextWord(char[][], String, Queue)} can also find valid words and also try to update this * structure, which leads to an incorrect position being added. 
*/ private char[][] removeWordFromGrid(final char[][] grid, final int xPos, final int yPos, final Set<Position> positionsUsedInWord) { // Mark current position as seen: final Set<Position> finalPositionsInWord = new HashSet<>(positionsUsedInWord); finalPositionsInWord.add(new Position(xPos, yPos)); // Remove the word found in this search: final char[][] updatedGrid = Grids.removeElementsAndApplyGravity(grid, finalPositionsInWord); return updatedGrid; } /** * Clone the provided queue. */ private Queue<Integer> cloneQueue(final Queue<Integer> wordLengthsRequired) { return new LinkedList<>(wordLengthsRequired); } }
package org.handwerkszeug.riak.http.rest; import static org.handwerkszeug.riak.util.Validation.notNull; import java.io.IOException; import java.io.OutputStream; import java.net.URI; import java.net.URISyntaxException; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.codehaus.jackson.JsonNode; import org.codehaus.jackson.map.ObjectMapper; import org.codehaus.jackson.node.ObjectNode; import org.handwerkszeug.riak.Markers; import org.handwerkszeug.riak.RiakException; import org.handwerkszeug.riak._; import org.handwerkszeug.riak.http.HttpRiakOperations; import org.handwerkszeug.riak.http.InputStreamHandler; import org.handwerkszeug.riak.http.LinkCondition; import org.handwerkszeug.riak.http.OutputStreamHandler; import org.handwerkszeug.riak.http.RiakHttpHeaders; import org.handwerkszeug.riak.mapreduce.DefaultMapReduceQuery; import org.handwerkszeug.riak.mapreduce.MapReduceQueryConstructor; import org.handwerkszeug.riak.mapreduce.MapReduceResponse; import org.handwerkszeug.riak.model.Bucket; import org.handwerkszeug.riak.model.DefaultRiakObject; import org.handwerkszeug.riak.model.GetOptions; import org.handwerkszeug.riak.model.KeyResponse; import org.handwerkszeug.riak.model.Link; import org.handwerkszeug.riak.model.Location; import org.handwerkszeug.riak.model.PutOptions; import org.handwerkszeug.riak.model.Quorum; import org.handwerkszeug.riak.model.RiakFuture; import org.handwerkszeug.riak.model.RiakObject; import org.handwerkszeug.riak.model.RiakResponse; import org.handwerkszeug.riak.nls.Messages; import org.handwerkszeug.riak.op.RiakResponseHandler; import org.handwerkszeug.riak.op.SiblingHandler; import org.handwerkszeug.riak.op.internal.CompletionSupport; import org.handwerkszeug.riak.op.internal.IncomprehensibleProtocolException; import org.handwerkszeug.riak.util.HttpUtil; import 
org.handwerkszeug.riak.util.JsonUtil; import org.handwerkszeug.riak.util.NettyUtil; import org.handwerkszeug.riak.util.StringUtil; import org.jboss.netty.buffer.ChannelBuffer; import org.jboss.netty.buffer.ChannelBufferInputStream; import org.jboss.netty.buffer.ChannelBufferOutputStream; import org.jboss.netty.buffer.ChannelBuffers; import org.jboss.netty.channel.Channel; import org.jboss.netty.handler.codec.http.DefaultHttpRequest; import org.jboss.netty.handler.codec.http.HttpChunk; import org.jboss.netty.handler.codec.http.HttpHeaders; import org.jboss.netty.handler.codec.http.HttpMessage; import org.jboss.netty.handler.codec.http.HttpMethod; import org.jboss.netty.handler.codec.http.HttpRequest; import org.jboss.netty.handler.codec.http.HttpResponse; import org.jboss.netty.handler.codec.http.HttpResponseStatus; import org.jboss.netty.handler.codec.http.HttpVersion; import org.jboss.netty.handler.codec.http.PartMessage; import org.jboss.netty.handler.codec.http.QueryStringEncoder; import org.jboss.netty.util.CharsetUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * @author taichi */ public class RestRiakOperations implements HttpRiakOperations { static final Logger LOG = LoggerFactory.getLogger(RestRiakOperations.class); String host; String riakPath; CompletionSupport support; String clientId; ObjectMapper objectMapper = new ObjectMapper(); public RestRiakOperations(String host, String riakPath, Channel channel) { notNull(host, "host"); notNull(channel, "channel"); this.host = removeSlashIfNeed(host); this.support = new CompletionSupport(channel); this.riakPath = riakPath; } protected String removeSlashIfNeed(String uri) { return uri.endsWith("/") ? 
uri.substring(0, uri.length() - 1) : uri; } @Override public RiakFuture ping(final RiakResponseHandler<String> handler) { notNull(handler, "handler"); HttpRequest request = build("/ping", HttpMethod.GET); final String procedure = "ping"; return handle(procedure, request, handler, new NettyUtil.MessageHandler() { @Override public boolean handle(Object receive) throws Exception { if (receive instanceof HttpResponse) { HttpResponse response = (HttpResponse) receive; if (NettyUtil.isSuccessful(response.getStatus())) { handler.handle(support.newResponse("pong")); return true; } } throw new IncomprehensibleProtocolException(procedure); } }); } @Override public void setClientId(String clientId) { this.clientId = clientId; } @Override public String getClientId() { return this.clientId; } protected HttpRequest build(String path, HttpMethod method) { return build(this.host + "/" + this.riakPath, path, method); } protected HttpRequest build(String app, String path, HttpMethod method) { try { URI uri = new URI(app + path); LOG.debug(Markers.BOUNDARY, uri.toASCIIString()); HttpRequest request = new DefaultHttpRequest(HttpVersion.HTTP_1_1, method, uri.toASCIIString()); request.setHeader(HttpHeaders.Names.HOST, uri.getHost()); request.setHeader(HttpHeaders.Names.CONNECTION, HttpHeaders.Values.KEEP_ALIVE); if (StringUtil.isEmpty(this.clientId) == false) { request.setHeader(RiakHttpHeaders.CLIENT_ID, this.clientId); } return request; } catch (URISyntaxException e) { throw new RiakException(e); } } @Override public RiakFuture listBuckets( final RiakResponseHandler<List<String>> handler) { notNull(handler, "handler"); HttpRequest request = build("?buckets=true", HttpMethod.GET); final String procedure = "listBuckets"; return handle(procedure, request, handler, new NettyUtil.MessageHandler() { @Override public boolean handle(Object receive) throws Exception { if (receive instanceof HttpResponse) { HttpResponse response = (HttpResponse) receive; ChannelBuffer buffer = 
response.getContent(); ObjectNode node = to(buffer); if (node != null) { List<String> list = JsonUtil.to(node .get("buckets")); handler.handle(support.newResponse(list)); return true; } } throw new IncomprehensibleProtocolException(procedure); } }); } @SuppressWarnings("unchecked") <T extends JsonNode> T to(ChannelBuffer buffer, T... t) { try { if (buffer != null && buffer.readable()) { JsonNode node = this.objectMapper .readTree(new ChannelBufferInputStream(buffer)); Class<?> clazz = t.getClass().getComponentType(); if (clazz.isAssignableFrom(node.getClass())) { return (T) node; } } } catch (IOException e) { LOG.error(Markers.BOUNDARY, e.getMessage(), e); throw new RiakException(e); } return null; } @Override public RiakFuture listKeys(String bucket, final RiakResponseHandler<KeyResponse> handler) { notNull(bucket, "bucket"); notNull(handler, "handler"); HttpRequest request = build("/" + bucket + "?props=false&keys=stream", HttpMethod.GET); final String procedure = "listKeys"; return handle(procedure, request, handler, new NettyUtil.MessageHandler() { @Override public boolean handle(Object receive) throws Exception { if (receive instanceof HttpResponse) { HttpResponse response = (HttpResponse) receive; if (NettyUtil.isSuccessful(response.getStatus())) { boolean done = response.isChunked() == false; if (done) { _listKeys(response.getContent(), handler); } return done; } } else if (receive instanceof HttpChunk) { HttpChunk chunk = (HttpChunk) receive; boolean done = chunk.isLast(); if (done == false) { _listKeys(chunk.getContent(), handler); } return done; } throw new IncomprehensibleProtocolException(procedure); } }); } protected void _listKeys(ChannelBuffer buffer, final RiakResponseHandler<KeyResponse> handler) throws Exception { ObjectNode on = to(buffer); if (on != null) { JsonNode node = on.get("keys"); if (node != null) { List<String> list = JsonUtil.to(node); KeyResponse kr = new KeyResponse(list, list.isEmpty()); handler.handle(support.newResponse(kr)); } } 
} @Override public RiakFuture getBucket(String bucket, final RiakResponseHandler<Bucket> handler) { notNull(bucket, "bucket"); notNull(handler, "handler"); HttpRequest request = build("/" + bucket + "?props=true", HttpMethod.GET); final String procedure = "getBucket"; return handle(procedure, request, handler, new NettyUtil.MessageHandler() { @Override public boolean handle(Object receive) throws Exception { if (receive instanceof HttpResponse) { HttpResponse response = (HttpResponse) receive; if (NettyUtil.isSuccessful(response.getStatus())) { BucketHolder holder = objectMapper.readValue( new ChannelBufferInputStream(response .getContent()), BucketHolder.class); handler.handle(support .newResponse(holder.props)); return true; } } throw new IncomprehensibleProtocolException(procedure); } }); } @Override public RiakFuture setBucket(Bucket bucket, final RiakResponseHandler<_> handler) { notNull(bucket, "bucket"); notNull(handler, "handler"); HttpRequest request = buildSetBucketRequest(bucket); final String procedure = "setBucket"; return handle(procedure, request, handler, new NettyUtil.MessageHandler() { @Override public boolean handle(Object receive) throws Exception { if (receive instanceof HttpResponse) { HttpResponse response = (HttpResponse) receive; if (NettyUtil.isSuccessful(response.getStatus())) { handler.handle(support.newResponse()); return true; } } throw new IncomprehensibleProtocolException(procedure); } }); } protected HttpRequest buildSetBucketRequest(Bucket bucket) { try { BucketHolder holder = new BucketHolder(); holder.props = bucket; ChannelBuffer buffer = ChannelBuffers.dynamicBuffer(); OutputStream out = new ChannelBufferOutputStream(buffer); objectMapper.writeValue(out, holder); HttpRequest request = build("/" + bucket.getName(), HttpMethod.PUT); request.setHeader(HttpHeaders.Names.CONTENT_LENGTH, buffer.readableBytes()); request.setHeader(HttpHeaders.Names.CONTENT_TYPE, RiakHttpHeaders.CONTENT_JSON); 
request.setHeader(HttpHeaders.Names.ACCEPT, RiakHttpHeaders.CONTENT_JSON); request.setContent(buffer); return request; } catch (IOException e) { throw new RiakException(e); } } @Override public RiakFuture get(Location location, RiakResponseHandler<RiakObject<byte[]>> handler) { notNull(location, "location"); notNull(handler, "handler"); return getSingle(buildGetRequst(location), location, handler); } @Override public RiakFuture get(Location location, GetOptions options, RiakResponseHandler<RiakObject<byte[]>> handler) { notNull(location, "location"); notNull(options, "options"); notNull(handler, "handler"); return getSingle(buildGetRequst(location, options), location, handler); } protected HttpRequest buildGetRequst(Location location) { HttpRequest request = build( "/" + location.getBucket() + "/" + location.getKey(), HttpMethod.GET); return request; } protected HttpRequest buildGetRequst(Location location, GetOptions options) { HttpRequest request = buildGetRequst(location); QueryStringEncoder params = new QueryStringEncoder(request.getUri()); if (options.getReadQuorum() != null) { params.addParam("r", options.getReadQuorum().getString()); } // TODO PR support. 
if (StringUtil.isEmpty(options.getIfNoneMatch()) == false) { request.setHeader(HttpHeaders.Names.IF_NONE_MATCH, options.getIfNoneMatch()); } if (StringUtil.isEmpty(options.getIfMatch()) == false) { request.setHeader(HttpHeaders.Names.IF_MATCH, options.getIfMatch()); } if (options.getIfModifiedSince() != null) { request.setHeader(HttpHeaders.Names.IF_MODIFIED_SINCE, HttpUtil.format(options.getIfModifiedSince())); } request.setUri(params.toString()); return request; } protected RiakFuture getSingle(HttpRequest request, final Location location, final RiakResponseHandler<RiakObject<byte[]>> handler) { String procedure = "get/single"; return handle(procedure, request, handler, new NettyUtil.ChunkedMessageAggregator(procedure, new NettyUtil.ChunkedMessageHandler() { @Override public void handle(HttpResponse response, ChannelBuffer buffer) throws Exception { RiakObject<byte[]> ro = convert(response, buffer, location); handler.handle(support.newResponse(ro)); } })); } protected RiakObject<byte[]> convert(HttpMessage headers, ChannelBuffer buffer, Location location) { DefaultRiakObject ro = new DefaultRiakObject(location); ro.setContent(buffer.array()); ro.setVectorClock(headers.getHeader(RiakHttpHeaders.VECTOR_CLOCK)); ro.setContentType(headers.getHeader(HttpHeaders.Names.CONTENT_TYPE)); // NOP ro.setCharset(charset); ro.setContentEncoding(headers .getHeader(HttpHeaders.Names.CONTENT_ENCODING)); // NOP ro.setVtag(vtag); List<String> links = headers.getHeaders(RiakHttpHeaders.LINK); ro.setLinks(parse(links)); String lastmod = headers.getHeader(HttpHeaders.Names.LAST_MODIFIED); if (StringUtil.isEmpty(lastmod) == false) { Date d = HttpUtil.parse(lastmod); ro.setLastModified(d); if (LOG.isDebugEnabled()) { LOG.debug(Markers.DETAIL, Messages.LastModified, lastmod); } } Map<String, String> map = new HashMap<String, String>(); for (String name : headers.getHeaderNames()) { if (RiakHttpHeaders.isUsermeta(name)) { String key = RiakHttpHeaders.fromUsermeta(name); map.put(key, 
headers.getHeader(name)); } } ro.setUserMetadata(map); return ro; } static final Pattern LINK_PATTERN = Pattern .compile("</\\w+/(\\w+)/(\\w+)>;\\s+riaktag=\"([^\"\\r\\n]+)\""); static final int LINK_BUCKET = 1; static final int LINK_KEY = 2; static final int LINK_TAG = 3; protected List<Link> parse(List<String> links) { List<Link> result = new ArrayList<Link>(); for (String raw : links) { Matcher m = LINK_PATTERN.matcher(raw); while (m.find()) { String b = m.group(LINK_BUCKET); String k = m.group(LINK_KEY); String t = m.group(LINK_TAG); if (b != null && k != null && t != null) { Link l = new Link(new Location(b, k), t); result.add(l); } } } return result; } @Override public RiakFuture get(final Location location, GetOptions options, final SiblingHandler handler) { notNull(location, "location"); notNull(options, "options"); notNull(handler, "handler"); HttpRequest request = buildGetRequst(location, options); request.setHeader(HttpHeaders.Names.ACCEPT, RiakHttpHeaders.MULTI_PART); final String procedure = "get/sibling"; return handle(procedure, request, handler, new NettyUtil.MessageHandler() { String vclock; @Override public boolean handle(Object receive) throws Exception { if (receive instanceof HttpResponse) { HttpResponse response = (HttpResponse) receive; vclock = response .getHeader(RiakHttpHeaders.VECTOR_CLOCK); handler.begin(); return false; } else if (receive instanceof PartMessage) { PartMessage part = (PartMessage) receive; boolean done = part.isLast(); part.setHeader(RiakHttpHeaders.VECTOR_CLOCK, vclock); if (done) { handler.end(); } else { RiakObject<byte[]> ro = convert(part, part.getContent(), location); handler.handle(support.newResponse(ro)); } return done; } throw new IncomprehensibleProtocolException(procedure); } }); } @Override public RiakFuture put(RiakObject<byte[]> content, final RiakResponseHandler<_> handler) { notNull(content, "content"); notNull(handler, "handler"); HttpRequest request = buildPutRequest(content); final String procedure = 
"put"; return handle(procedure, request, handler, new NettyUtil.MessageHandler() { @Override public boolean handle(Object receive) throws Exception { if (receive instanceof HttpResponse) { HttpResponse response = (HttpResponse) receive; HttpResponseStatus status = response.getStatus(); if (NettyUtil.isSuccessful(status)) { handler.handle(support.newResponse()); return true; } } throw new IncomprehensibleProtocolException(procedure); } }); } protected HttpRequest buildPutRequest(RiakObject<byte[]> content) { Location location = content.getLocation(); HttpRequest request = build( "/" + location.getBucket() + "/" + location.getKey(), HttpMethod.PUT); merge(request, content); return request; } protected void merge(HttpRequest request, RiakObject<byte[]> content) { if (StringUtil.isEmpty(content.getVectorClock()) == false) { request.setHeader(RiakHttpHeaders.VECTOR_CLOCK, content.getVectorClock()); } ChannelBuffer buffer = ChannelBuffers.wrappedBuffer(content .getContent()); request.setHeader(HttpHeaders.Names.CONTENT_LENGTH, buffer.readableBytes()); request.setContent(buffer); if (StringUtil.isEmpty(content.getContentType()) == false) { request.setHeader(HttpHeaders.Names.CONTENT_TYPE, content.getContentType()); } // NOP content.getCharset(); if (StringUtil.isEmpty(content.getContentEncoding()) == false) { request.setHeader(HttpHeaders.Names.CONTENT_ENCODING, content.getContentEncoding()); } // NOP content.getVtag(); if ((content.getLinks() != null) && (content.getLinks().isEmpty() == false)) { addLinkHeader(request, content); } if (content.getLastModified() != null) { request.setHeader(HttpHeaders.Names.LAST_MODIFIED, HttpUtil.format(content.getLastModified())); } if ((content.getUserMetadata() != null) && (content.getUserMetadata().isEmpty() == false)) { Map<String, String> map = content.getUserMetadata(); for (String key : map.keySet()) { request.setHeader(RiakHttpHeaders.toUsermeta(key), map.get(key)); } } } protected void addLinkHeader(HttpRequest request, 
RiakObject<byte[]> content) { StringBuilder stb = new StringBuilder(); for (Link link : content.getLinks()) { if (0 < stb.length()) { stb.append(", "); } stb.append('<'); stb.append(this.riakPath); stb.append('/'); stb.append(link.getLocation().getBucket()); stb.append('/'); stb.append(link.getLocation().getKey()); stb.append(">; riaktag=\""); stb.append(link.getTag()); stb.append('"'); // MochiWeb has problem of too long header ? if (2000 < stb.length()) { request.addHeader(RiakHttpHeaders.LINK, stb.toString()); stb = new StringBuilder(); } } if (0 < stb.length()) { request.addHeader(RiakHttpHeaders.LINK, stb.toString()); } } /** * if returning body has sibling then call get with silibling call * automatically. */ @Override public RiakFuture put(final RiakObject<byte[]> content, final PutOptions options, final SiblingHandler handler) { notNull(content, "content"); notNull(options, "options"); notNull(handler, "handler"); HttpRequest request = buildPutRequest(content, options); final String procedure = "put/sibling"; return handle(procedure, request, handler, new NettyUtil.MessageHandler() { @Override public boolean handle(Object receive) throws Exception { if (receive instanceof HttpResponse) { HttpResponse response = (HttpResponse) receive; if (NettyUtil.isSuccessful(response.getStatus())) { try { handler.begin(); RiakObject<byte[]> ro = convert(response, response.getContent(), content.getLocation()); handler.handle(support.newResponse(ro)); } finally { handler.end(); } } else if (response.getStatus().getCode() == 300) { dispatchToGetSibling(content.getLocation(), options, handler); } return true; } throw new IncomprehensibleProtocolException(procedure); } }); } protected HttpRequest buildPutRequest(RiakObject<byte[]> content, PutOptions options) { HttpRequest request = buildPutRequest(content); QueryStringEncoder params = to(options, request); request.setUri(params.toString()); return request; } protected QueryStringEncoder to(PutOptions options, HttpRequest 
request) { QueryStringEncoder params = new QueryStringEncoder(request.getUri()); if (options.getReadQuorum() != null) { // PBC-API does't support this parameter. why not? params.addParam("r", options.getReadQuorum().getString()); } if (options.getWriteQuorum() != null) { params.addParam("w", options.getWriteQuorum().getString()); } if (options.getDurableWriteQuorum() != null) { params.addParam("dw", options.getDurableWriteQuorum().getString()); } if (options.getReturnBody()) { params.addParam("returnbody", String.valueOf(options.getReturnBody())); } return params; } protected void dispatchToGetSibling(Location location, final PutOptions options, SiblingHandler handler) { get(location, new GetOptions() { @Override public Quorum getReadQuorum() { return options.getReadQuorum(); } @Override public String getIfNoneMatch() { return options.getIfNoneMatch(); } @Override public String getIfMatch() { return options.getIfMatch(); } @Override public Date getIfModifiedSince() { return options.getIfModifiedSince(); } }, handler); } @Override public RiakFuture post(String bucket, RiakObject<byte[]> content, PutOptions options, RiakResponseHandler<RiakObject<byte[]>> handler) { // TODO Auto-generated method stub return null; } @Override public RiakFuture delete(Location location, final RiakResponseHandler<_> handler) { notNull(location, "location"); notNull(handler, "handler"); HttpRequest request = buildDeleteRequest(location); return _delete("delete", handler, request); } protected HttpRequest buildDeleteRequest(Location location) { HttpRequest request = build( "/" + location.getBucket() + "/" + location.getKey(), HttpMethod.DELETE); return request; } protected RiakFuture _delete(String name, final RiakResponseHandler<_> handler, HttpRequest request) { final String procedure = name; return handle(procedure, request, handler, new NettyUtil.MessageHandler() { @Override public boolean handle(Object receive) throws Exception { if (receive instanceof HttpResponse) { HttpResponse 
response = (HttpResponse) receive; if (NettyUtil.isSuccessful(response.getStatus())) { handler.handle(support.newResponse()); return true; } } throw new IncomprehensibleProtocolException(procedure); } }); } @Override public RiakFuture delete(Location location, Quorum readWrite, RiakResponseHandler<_> handler) { notNull(location, "location"); notNull(readWrite, "readWrite"); notNull(handler, "handler"); HttpRequest request = buildDeleteRequest(location, readWrite); return _delete("delete/quorum", handler, request); } protected HttpRequest buildDeleteRequest(Location location, Quorum readWrite) { HttpRequest request = buildDeleteRequest(location); QueryStringEncoder params = new QueryStringEncoder(request.getUri()); params.addParam("rw", readWrite.getString()); request.setUri(params.toString()); return request; } @Override public RiakFuture mapReduce(MapReduceQueryConstructor constructor, RiakResponseHandler<MapReduceResponse> handler) { notNull(constructor, "constructor"); notNull(handler, "handler"); DefaultMapReduceQuery query = new DefaultMapReduceQuery(); constructor.cunstruct(query); HttpRequest request = buildMapReduceRequest(); ChannelBuffer buffer = ChannelBuffers.dynamicBuffer(1024); query.prepare(new ChannelBufferOutputStream(buffer)); HttpHeaders.setContentLength(request, buffer.readableBytes()); request.setContent(buffer); return mapReduce(request, handler); } protected RiakFuture mapReduce(HttpRequest request, final RiakResponseHandler<MapReduceResponse> handler) { final String procedure = "mapReduce"; return handle(procedure, request, handler, new NettyUtil.MessageHandler() { @Override public boolean handle(Object receive) throws Exception { if (receive instanceof HttpResponse) { return false; } else if (receive instanceof HttpChunk) { HttpChunk chunk = (HttpChunk) receive; boolean done = chunk.isLast(); ObjectNode node = to(chunk.getContent()); MapReduceResponse response = new RestMapReduceResponse( node, done); 
handler.handle(support.newResponse(response)); return done; } throw new IncomprehensibleProtocolException(procedure); } }); } protected HttpRequest buildMapReduceRequest() { HttpRequest request = build(this.host, "/mapred?chunked=true", HttpMethod.POST); request.setHeader(HttpHeaders.Names.CONTENT_TYPE, RiakHttpHeaders.CONTENT_JSON); return request; } @Override public RiakFuture mapReduce(String rawJson, RiakResponseHandler<MapReduceResponse> handler) { notNull(rawJson, "rawJson"); notNull(handler, "handler"); HttpRequest request = buildMapReduceRequest(); HttpHeaders.setContentLength(request, rawJson.length()); request.setContent(ChannelBuffers.wrappedBuffer(rawJson.getBytes())); return mapReduce(request, handler); } @Override public RiakFuture getStream(String key, GetOptions options, InputStreamHandler handler) { // TODO Auto-generated method stub return null; } @Override public RiakFuture putStream(RiakObject<OutputStreamHandler> content, RiakResponseHandler<String> handler) { // TODO Auto-generated method stub return null; } @Override public RiakFuture walk(Location walkbegin, List<LinkCondition> conditions, RiakResponseHandler<List<RiakObject<byte[]>>> handler) { // TODO Auto-generated method stub return null; } @Override public RiakFuture getStats(RiakResponseHandler<ObjectNode> handler) { // TODO Auto-generated method stub return null; } protected <T> RiakFuture handle(final String name, Object send, final RiakResponseHandler<T> users, final NettyUtil.MessageHandler internal) { return this.support.handle(name, send, users, new NettyUtil.MessageHandler() { @Override public boolean handle(Object receive) throws Exception { if (receive instanceof HttpResponse) { HttpResponse response = (HttpResponse) receive; if (NettyUtil.isError(response.getStatus())) { users.onError(new RestErrorResponse(response)); return true; } } return internal.handle(receive); } }); } class RestErrorResponse implements RiakResponse { final HttpResponse master; public 
RestErrorResponse(HttpResponse master) { this.master = master; } @Override public int getResponseCode() { return this.master.getStatus().getCode(); } @Override public String getMessage() { ChannelBuffer content = this.master.getContent(); if (content.readable()) { return content.toString(CharsetUtil.UTF_8); } return ""; } @Override public void operationComplete() { support.complete(); } } }
package org.objectweb.proactive.core.body.future; import org.objectweb.proactive.Body; import org.objectweb.proactive.core.ProActiveRuntimeException; import org.objectweb.proactive.core.UniqueID; import org.objectweb.proactive.core.body.LocalBodyStore; import org.objectweb.proactive.core.body.UniversalBody; import org.objectweb.proactive.core.body.reply.Reply; import org.objectweb.proactive.core.body.reply.ReplyImpl; import org.objectweb.proactive.core.mop.Utils; import org.objectweb.proactive.core.util.ProActiveProperties; public class FuturePool extends Object implements java.io.Serializable { protected boolean newState; // table of future and ACs private FutureMap futures; // ID of the body corresponding to this futurePool private UniqueID ownerBody; // Active queue of AC services private transient ActiveACQueue queueAC; // toggle for enabling or disabling automatic continuation private boolean acEnabled; // table used for storing values which arrive in the futurePool BEFORE the registration // of its corresponding future. private java.util.HashMap valuesForFutures; public FuturePool() { futures = new FutureMap(); valuesForFutures = new java.util.HashMap(); this.newState = false; if (ProActiveProperties.getACState().equals("enable")) this.acEnabled = true; else this.acEnabled = false; if (acEnabled) { queueAC = new ActiveACQueue(); queueAC.start(); } } // this table is used to register destination before sending. // So, a future could retreive its destination during serialization // this table indexed by the thread which perform the registration. 
static private java.util.Hashtable bodyDestination; // to register in the table static public void registerBodyDestination(UniversalBody dest) { bodyDestination.put(Thread.currentThread(), dest); } // to clear an entry in the table static public void removeBodyDestination() { bodyDestination.remove(Thread.currentThread()); } // to get a destination static public UniversalBody getBodyDestination() { return (UniversalBody) (bodyDestination.get(Thread.currentThread())); } // this table is used to register deserialized futures after receive // So, futures to add in the local futurePool could be retreived static private java.util.Hashtable incomingFutures; // to register an incoming future in the table public static void registerIncomingFuture(Future f) { java.util.ArrayList listOfFutures = (java.util.ArrayList) incomingFutures.get(Thread.currentThread()); if (listOfFutures != null) { listOfFutures.add(f); } else { java.util.ArrayList newListOfFutures = new java.util.ArrayList(); newListOfFutures.add(f); incomingFutures.put(Thread.currentThread(), newListOfFutures); } } // to remove an entry from the table static public void removeIncomingFutures() { incomingFutures.remove(Thread.currentThread()); } // to get a list of incomingFutures static public java.util.ArrayList getIncomingFutures() { return (java.util.ArrayList) (incomingFutures.get(Thread.currentThread())); } // static init block static { bodyDestination = new java.util.Hashtable(); incomingFutures = new java.util.Hashtable(); } /** * Setter of the ID of the body corresonding to this FuturePool * @param i ID of the owner body. 
*/ public void setOwnerBody(UniqueID i) { ownerBody = i; } /** * Getter of the ID of the body corresonding to this FuturePool */ public UniqueID getOwnerBody() { return ownerBody; } /** * To enable the automatic continuation behaviour for all futures in * this FuturePool * */ public void enableAC() { this.queueAC = new ActiveACQueue(); this.queueAC.start(); this.acEnabled = true; } /** * To disable the automatic continuation behaviour for all futures in * this FuturePool * */ public void disableAC() { this.acEnabled = false; this.queueAC.killMe(); this.queueAC = null; } /** * Method called when a reply is recevied, ie a value is available for a future. * This method perform local futures update, and put an ACService in the activeACqueue. * @param id sequence id of the future to update * @param creatorID ID of the body creator of the future to update * @param result value to update with the futures */ public synchronized void receiveFutureValue(long id, UniqueID creatorID, Object result) throws java.io.IOException { // get all aiwated futures java.util.ArrayList futuresToUpdate = futures.getFuturesToUpdate(id, creatorID); if (futuresToUpdate != null) { Future future = (Future) (futuresToUpdate.get(0)); if (future != null) { future.receiveReply(result); } // if there are more than one future to update, we "give" deep copy // of the result to the other futures to respect ProActive model // We use here the migration tag to perform a simple serialization (ie // without continuation side-effects) setMigrationTag(); for (int i = 1; i < futuresToUpdate.size(); i++) { Future otherFuture = (Future) (futuresToUpdate.get(i)); otherFuture.receiveReply(Utils.makeDeepCopy(result)); } unsetMigrationTag(); stateChange(); // 2) create and put ACservices if (acEnabled) { java.util.ArrayList bodiesToContinue = futures.getAutomaticContinuation(id, creatorID); if ((bodiesToContinue != null) && (bodiesToContinue.size() != 0)) { queueAC.addACRequest(new ACService(bodiesToContinue, new 
ReplyImpl(creatorID, id, null, result))); } } // 3) Remove futures from the futureMap futures.removeFutures(id, creatorID); } else { // we have to store the result until future arrive this.valuesForFutures.put(""+id+creatorID, result); } } /** * To put a future in the FutureMap * @param id sequence id of the future * @param creatorID UniqueID of the body which creates futureObject * @param futureObject future to register */ public synchronized void receiveFuture(Future futureObject) { futureObject.setSenderID(ownerBody); futures.receiveFuture(futureObject); long id = futureObject.getID(); UniqueID creatorID = futureObject.getCreatorID(); if (valuesForFutures.get(""+id+creatorID) != null) { try { this.receiveFutureValue(id, creatorID, valuesForFutures.remove(""+id+creatorID)); } catch (java.io.IOException e) { } } } /** * To add an automatic contiunation, ie a destination body, for a particular future. * @param id sequence id of the corresponding future * @param creatorID UniqueID of the body which creates futureObject * @param bodyDest body destination of this continuation */ public void addAutomaticContinuation(long id, UniqueID creatorID, UniversalBody bodyDest) { futures.addAutomaticContinuation(id, creatorID, bodyDest); } public synchronized void waitForReply() { this.newState = false; while (!newState) { try { wait(); } catch (InterruptedException e) { e.printStackTrace(); } } } /** * To register a destination before sending a reques or a reply * Registration key is the calling thread. */ public void registerDestination(UniversalBody dest){ if (acEnabled) FuturePool.registerBodyDestination(dest); } /** * To clear registred destination for the calling thread. 
*/ public void removeDestination(){ if (acEnabled) FuturePool.removeBodyDestination(); } public void setMigrationTag() { futures.setMigrationTag(); } public void unsetMigrationTag() { futures.unsetMigrationTag(); } private void stateChange() { this.newState = true; notifyAll(); } private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException { setMigrationTag(); out.defaultWriteObject(); if (acEnabled) { // send the queue of AC requests out.writeObject(queueAC.getQueue()); // stop the ActiveQueue thread queueAC.killMe(); } } private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException { in.defaultReadObject(); unsetMigrationTag(); if (acEnabled) { // create a new ActiveACQueue java.util.ArrayList queue = (java.util.ArrayList) (in.readObject()); queueAC = new ActiveACQueue(queue); queueAC.start(); } } /** * Active Queue for AC. This queue has his own thread to perform ACservices * available in the queue. This thread is compliant with migration by using * the threadStore of the body correponding to this FutureMap. * Note that the ACServices are served in FIFO manner. * @see ACservice */ private class ActiveACQueue extends Thread { private java.util.ArrayList queue; private int counter; private boolean kill; public ActiveACQueue() { queue = new java.util.ArrayList(); counter = 0; kill = false; this.setName("Thread for AC"); } public ActiveACQueue(java.util.ArrayList queue) { this.queue = queue; counter = queue.size(); kill = false; this.setName("Thread for AC"); } /** * return the current queue of ACServices to perform */ public java.util.ArrayList getQueue() { return queue; } /** * Add a ACservice in the active queue. 
*/ public synchronized void addACRequest(ACService r) { queue.add(r); counter++; notifyAll(); } /** * Return the oldest request in queue and remove it from the queue */ public synchronized ACService removeACRequest() { counter return (ACService) (queue.remove(0)); } /** * To stop the thread. */ public synchronized void killMe() { kill = true; notifyAll(); } public void run() { // get a reference on the owner body // try until it's not null because deserialization of the body // may be not finished when we restart the thread. Body owner = null; while (owner == null) { owner = LocalBodyStore.getInstance().getLocalBody(ownerBody); // it's a halfbody... if (owner == null) owner = LocalBodyStore.getInstance().getLocalHalfBody(ownerBody); } while (true) { // if there is no AC to do, wait... waitForAC(); if (kill) break; // there are ACs to do ! try { // enter in the threadStore owner.enterInThreadStore(); // if body has migrated, kill the thread if (kill) break; ACService toDo = this.removeACRequest(); if (toDo != null) { toDo.doAutomaticContinuation(); } // exit from the threadStore owner.exitFromThreadStore(); } catch (Exception e2) { // to unblock active object owner.exitFromThreadStore(); throw new ProActiveRuntimeException("Error while sending reply for AC ", e2); } } } // synchronized wait on ACRequest queue private synchronized void waitForAC() { try { while ((counter == 0) && !kill) { wait(); } } catch (InterruptedException e) { e.printStackTrace(); } } } /** * A simple object for a request for an automatic continuation * @see ActiveACQueue */ private class ACService implements java.io.Serializable { // bodies that have to be updated private java.util.ArrayList dests; // reply to send private Reply reply; public ACService(java.util.ArrayList dests, Reply reply) { this.dests = dests; this.reply = reply; } public void doAutomaticContinuation() throws java.io.IOException { if (dests != null) { for (int i = 0; i < dests.size(); i++) { UniversalBody dest = 
(UniversalBody) (dests.get(i)); registerDestination(dest); reply.send(dest); removeDestination(); } } } } //ACService }
package org.objectweb.proactive.core.util;

import org.apache.log4j.Logger;

import java.util.Iterator;

/**
 * A List implementation backed by a circular (ring) array, optimized for
 * FIFO access: adding at the back and removing at the front are O(1).
 */
public class CircularArrayList extends java.util.AbstractList
    implements java.util.List, java.io.Serializable {

    static Logger logger = Logger.getLogger(CircularArrayList.class.getName());

    private static final int DEFAULT_SIZE = 5;

    protected Object[] array;

    // head points to the first logical element in the array, and tail points to
    // the element following the last. The list is empty when head == tail,
    // which is why the backing array always keeps one spare slot.
    protected int head = 0;
    protected int tail = 0;

    // Strictly speaking, we don't need to keep a handle to size, as it can be
    // calculated programmatically, but keeping it makes the algorithms faster.
    protected int size = 0;

    public CircularArrayList() {
        this(DEFAULT_SIZE);
    }

    public CircularArrayList(int size) {
        array = new Object[size];
    }

    /**
     * Builds a list containing the elements of the given collection.
     * BUG FIX: the original never initialized {@code size}, so a list built
     * from a non-empty collection reported {@code size() == 0}; it also
     * allocated no spare slot, breaking the head == tail emptiness invariant
     * when the backing array was exactly full.
     */
    public CircularArrayList(java.util.Collection c) {
        size = c.size();
        tail = c.size();
        // one extra slot keeps the "head == tail means empty" invariant valid
        array = new Object[c.size() + 1];
        c.toArray(array);
    }

    public String toString() {
        StringBuffer sb = new StringBuffer();
        sb.append("CircularArray size=");
        sb.append(size);
        sb.append("\n");
        for (int i = 0; i < size; i++) {
            sb.append("[");
            sb.append(convert(i));
            sb.append("]=>");
            sb.append(array[convert(i)]);
            sb.append(", ");
        }
        sb.append("\n");
        return sb.toString();
    }

    // small manual smoke test: prepend 8..0 and log the structure after each add
    public static void main(String[] args) {
        CircularArrayList c = new CircularArrayList(5);
        for (int v = 8; v >= 0; v--) {
            c.add(0, new Integer(v));
            logger.info(c.toString());
        }
    }

    public boolean isEmpty() {
        return head == tail; // or size == 0
    }

    // We use this method to ensure that the capacity of the
    // list will suffice for the number of elements we want to
    // insert. If it is too small, we make a new, bigger array
    // and copy the old elements in.
public void ensureCapacity(int minCapacity) { int oldCapacity = array.length; if (minCapacity > oldCapacity) { int newCapacity = ((oldCapacity * 3) / 2) + 1; if (newCapacity < minCapacity) { newCapacity = minCapacity; } Object[] newData = new Object[newCapacity]; toArray(newData); tail = size; head = 0; array = newData; } } public int size() { // the size can also be worked out each time as: // (tail + array.length - head) % array.length return size; } public boolean contains(Object elem) { return indexOf(elem) >= 0; } public int indexOf(Object elem) { if (elem == null) { for (int i = 0; i < size; i++) if (array[convert(i)] == null) { return i; } } else { for (int i = 0; i < size; i++) if (elem.equals(array[convert(i)])) { return i; } } return -1; } public int lastIndexOf(Object elem) { if (elem == null) { for (int i = size - 1; i >= 0; i if (array[convert(i)] == null) { return i; } } else { for (int i = size - 1; i >= 0; i if (elem.equals(array[convert(i)])) { return i; } } return -1; } public Object[] toArray() { return toArray(new Object[size]); } public Object[] toArray(Object[] a) { //System.out.println("head="+head+" tail="+tail+" size="+size); if (size == 0) { return a; } if (a.length < size) { a = (Object[]) java.lang.reflect.Array.newInstance(a.getClass() .getComponentType(), size); } if (head < tail) { System.arraycopy(array, head, a, 0, tail - head); } else { System.arraycopy(array, head, a, 0, array.length - head); System.arraycopy(array, 0, a, array.length - head, tail); } if (a.length > size) { a[size] = null; } return a; } public Object get(int index) { rangeCheck(index); return array[convert(index)]; } public Object set(int index, Object element) { modCount++; rangeCheck(index); int convertedIndex = convert(index); Object oldValue = array[convertedIndex]; array[convertedIndex] = element; return oldValue; } public boolean add(Object o) { modCount++; // We have to have at least one empty space ensureCapacity(size + 1 + 1); array[tail] = o; tail = 
(tail + 1) % array.length; size++; return true; } // This method is the main reason we re-wrote the class. // It is optimized for removing first and last elements // but also allows you to remove in the middle of the list. public Object remove(int index) { modCount++; rangeCheck(index); int pos = convert(index); // an interesting application of try/finally is to avoid // having to use local variables try { return array[pos]; } finally { array[pos] = null; // Let gc do its work // optimized for FIFO access, i.e. adding to back and // removing from front if (pos == head) { head = (head + 1) % array.length; } else if (pos == tail) { tail = (tail - 1 + array.length) % array.length; } else { if ((pos > head) && (pos > tail)) { // tail/head/pos System.arraycopy(array, head, array, head + 1, pos - head); head = (head + 1) % array.length; } else { System.arraycopy(array, pos + 1, array, pos, tail - pos - 1); tail = (tail - 1 + array.length) % array.length; } } size } } public void clear() { modCount++; // Let gc do its work for (int i = 0; i != size; i++) { array[convert(i)] = null; } head = tail = size = 0; } public boolean addAll(java.util.Collection c) { modCount++; int numNew = c.size(); // We have to have at least one empty space ensureCapacity(size + numNew + 1); java.util.Iterator e = c.iterator(); for (int i = 0; i < numNew; i++) { array[tail] = e.next(); tail = (tail + 1) % array.length; size++; } return numNew != 0; } public void add(int index, Object element) { if (index == size) { add(element); return; } modCount++; rangeCheck(index); // We have to have at least one empty space ensureCapacity(size + 1 + 1); int pos = convert(index); if (pos == head) { head = (head - 1 + array.length) % array.length; array[head] = element; } else if (pos == tail) { array[tail] = element; tail = (tail + 1) % array.length; } else { if ((pos > head) && (pos > tail)) { // tail/head/pos System.arraycopy(array, pos, array, head - 1, pos - head + 1); head = (head - 1 + array.length) % 
array.length; } else { // head/pos/tail System.arraycopy(array, pos, array, pos + 1, tail - pos); tail = (tail + 1) % array.length; } array[pos] = element; } size++; } public boolean addAll(int index, java.util.Collection c) { boolean result = true; Iterator it = c.iterator(); while (it.hasNext()) { result &= this.add(it.next()); } return result; } // The convert() method takes a logical index (as if head was // always 0) and calculates the index within array private int convert(int index) { return (index + head) % array.length; } private void rangeCheck(int index) { if ((index >= size) || (index < 0)) { throw new IndexOutOfBoundsException("Index: " + index + ", Size: " + size); } } public Iterator fastIterator() { return new CircularArrayListIterator(this); } private void writeObject(java.io.ObjectOutputStream s) throws java.io.IOException { s.writeInt(size); for (int i = 0; i != size; i++) { s.writeObject(array[convert(i)]); } } private void readObject(java.io.ObjectInputStream s) throws java.io.IOException, ClassNotFoundException { // Read in size of list and allocate array head = 0; size = tail = s.readInt(); if (tail < DEFAULT_SIZE) { array = new Object[DEFAULT_SIZE]; } else { array = new Object[tail]; } // Read in all elements in the proper order. for (int i = 0; i < tail; i++) array[i] = s.readObject(); } /** * This class implements an iterator for the CircularArrayList. * It is more efficient than the default implementation of the * AbstractList Iterator. * @author Laurent Baduel */ private class CircularArrayListIterator implements Iterator { private CircularArrayList clist; private int pos; public CircularArrayListIterator(CircularArrayList clist) { this.clist = clist; this.pos = -1; } public boolean hasNext() { return (this.clist.convert(this.pos+1) < this.clist.size); } public Object next() { return this.clist.get(++this.pos); } public void remove() { this.clist.remove(this.pos); } } }
package gov.nih.nci.cananolab.ui.core;

import gov.nih.nci.cananolab.dto.common.ProtocolFileBean;
import gov.nih.nci.cananolab.dto.common.ReportBean;
import gov.nih.nci.cananolab.dto.common.UserBean;
import gov.nih.nci.cananolab.dto.particle.ParticleBean;
import gov.nih.nci.cananolab.service.common.FileService;
import gov.nih.nci.cananolab.service.common.impl.FileServiceLocalImpl;
import gov.nih.nci.cananolab.service.particle.NanoparticleSampleService;
import gov.nih.nci.cananolab.service.particle.impl.NanoparticleSampleServiceLocalImpl;
import gov.nih.nci.cananolab.service.particle.impl.NanoparticleSampleServiceRemoteImpl;
import gov.nih.nci.cananolab.service.protocol.ProtocolService;
import gov.nih.nci.cananolab.service.protocol.impl.ProtocolServiceLocalImpl;
import gov.nih.nci.cananolab.service.protocol.impl.ProtocolServiceRemoteImpl;
import gov.nih.nci.cananolab.service.report.ReportService;
import gov.nih.nci.cananolab.service.report.impl.ReportServiceLocalImpl;
import gov.nih.nci.cananolab.service.report.impl.ReportServiceRemoteImpl;
import gov.nih.nci.cananolab.util.CaNanoLabConstants;

import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.List;

import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;

import org.apache.struts.action.Action;
import org.apache.struts.action.ActionForm;
import org.apache.struts.action.ActionForward;
import org.apache.struts.action.ActionMapping;
import org.apache.struts.action.ActionMessage;
import org.apache.struts.action.ActionMessages;

/**
 * Struts action that counts, across the selected search locations (the local
 * database plus any remote grid nodes), the number of visible nanoparticle
 * samples, reports and protocol files, and writes the three counts to the
 * response body as a single tab-separated line.
 */
public class CountAction extends Action {
	/**
	 * Runs the three searches with empty (match-everything) criteria and
	 * writes "particleCount\treportCount\tprotocolCount" to the response.
	 * Returns null because the response is written directly; the only
	 * forward returned is the input forward when a remote grid node yields
	 * no particles.
	 */
	public ActionForward execute(ActionMapping mapping, ActionForm form,
			HttpServletRequest request, HttpServletResponse response)
			throws Exception {
		HttpSession session = request.getSession();
		UserBean user = (UserBean) session.getAttribute("user");
		// search locations are passed as a '~'-separated list,
		// e.g. "local~someGridNode"; getParameter already returns a String,
		// so no cast is needed
		String gridNodeHostStr = request.getParameter("searchLocations");
		String[] searchLocations = new String[0];
		if (gridNodeHostStr != null) {
			searchLocations = gridNodeHostStr.split("~");
		}
		ActionMessages msgs = new ActionMessages();

		// particle count: all criteria left empty so everything matches
		List<ParticleBean> foundParticles = new ArrayList<ParticleBean>();
		String particleSource = "";
		List<String> nanoparticleEntityClassNames = new ArrayList<String>();
		List<String> otherNanoparticleEntityTypes = new ArrayList<String>();
		List<String> functionalizingEntityClassNames = new ArrayList<String>();
		List<String> otherFunctionalizingTypes = new ArrayList<String>();
		List<String> functionClassNames = new ArrayList<String>();
		List<String> otherFunctionTypes = new ArrayList<String>();
		String[] charaClassNames = new String[0];
		String[] words = null;
		for (String location : searchLocations) {
			List<ParticleBean> particles = null;
			NanoparticleSampleService service = null;
			if (location.equals("local")) {
				service = new NanoparticleSampleServiceLocalImpl();
			} else {
				String serviceUrl = InitSetup.getInstance().getGridServiceUrl(
						request, location);
				service = new NanoparticleSampleServiceRemoteImpl(serviceUrl);
			}
			particles = service.findNanoparticleSamplesBy(particleSource,
					nanoparticleEntityClassNames.toArray(new String[0]),
					otherNanoparticleEntityTypes.toArray(new String[0]),
					functionalizingEntityClassNames.toArray(new String[0]),
					otherFunctionalizingTypes.toArray(new String[0]),
					functionClassNames.toArray(new String[0]),
					otherFunctionTypes.toArray(new String[0]), charaClassNames,
					words);
			for (ParticleBean particle : particles) {
				particle.setLocation(location);
			}
			if (location.equals("local")) {
				List<ParticleBean> filteredParticles = new ArrayList<ParticleBean>();
				// keep only the particles the current user is allowed to see
				for (ParticleBean particle : particles) {
					service.retrieveVisibility(particle, user);
					if (!particle.isHidden()) {
						filteredParticles.add(particle);
					}
				}
				foundParticles.addAll(filteredParticles);
			} else {
				if (particles == null || particles.size() == 0) {
					// a remote node with no particles aborts the whole count
					// and bounces back to the input page with a message
					ActionMessage msg = new ActionMessage(
							"message.grid.discovery.none",
							CaNanoLabConstants.DOMAIN_MODEL_NAME);
					msgs.add(ActionMessages.GLOBAL_MESSAGE, msg);
					saveMessages(request, msgs);
					return mapping.getInputForward();
				}
				foundParticles.addAll(particles);
			}
		}
		// foundParticles is constructed above and can never be null, so the
		// former null guard was dead code
		int particleCount = foundParticles.size();

		// report count: same empty criteria, reused composition lists
		String reportTitle = "";
		String reportCategory = "";
		List<ReportBean> foundReports = new ArrayList<ReportBean>();
		ReportService service = null;
		for (String location : searchLocations) {
			if (location.equals("local")) {
				service = new ReportServiceLocalImpl();
			} else {
				String serviceUrl = InitSetup.getInstance().getGridServiceUrl(
						request, location);
				service = new ReportServiceRemoteImpl(serviceUrl);
			}
			List<ReportBean> reports = service.findReportsBy(reportTitle,
					reportCategory, nanoparticleEntityClassNames
							.toArray(new String[0]),
					otherNanoparticleEntityTypes.toArray(new String[0]),
					functionalizingEntityClassNames.toArray(new String[0]),
					otherFunctionalizingTypes.toArray(new String[0]),
					functionClassNames.toArray(new String[0]),
					otherFunctionTypes.toArray(new String[0]));
			for (ReportBean report : reports) {
				report.setLocation(location);
			}
			if (location.equals("local")) {
				List<ReportBean> filteredReports = new ArrayList<ReportBean>();
				// keep only the reports the current user is allowed to see
				FileService fileService = new FileServiceLocalImpl();
				for (ReportBean report : reports) {
					fileService.retrieveVisibility(report, user);
					if (!report.isHidden()) {
						filteredReports.add(report);
					}
				}
				foundReports.addAll(filteredReports);
			} else {
				foundReports.addAll(reports);
			}
		}
		// always non-null (see above); former null guard was dead code
		int reportCount = foundReports.size();

		// protocol file count: empty criteria match everything
		String protocolType = "";
		String protocolName = "";
		String fileTitle = "";
		List<ProtocolFileBean> foundProtocolFiles = new ArrayList<ProtocolFileBean>();
		ProtocolService protocolService = null;
		for (String location : searchLocations) {
			if (location.equals("local")) {
				protocolService = new ProtocolServiceLocalImpl();
			} else {
				String serviceUrl = InitSetup.getInstance().getGridServiceUrl(
						request, location);
				protocolService = new ProtocolServiceRemoteImpl(serviceUrl);
			}
			List<ProtocolFileBean> protocolFiles = protocolService
					.findProtocolFilesBy(protocolType, protocolName, fileTitle);
			if (location.equals("local")) {
				List<ProtocolFileBean> filteredProtocolFiles = new ArrayList<ProtocolFileBean>();
				// keep only the protocol files the user is allowed to see
				FileService fileService = new FileServiceLocalImpl();
				for (ProtocolFileBean protocolFile : protocolFiles) {
					fileService.retrieveVisibility(protocolFile, user);
					if (!protocolFile.isHidden()) {
						filteredProtocolFiles.add(protocolFile);
					}
				}
				foundProtocolFiles.addAll(filteredProtocolFiles);
			} else {
				foundProtocolFiles.addAll(protocolFiles);
			}
		}
		// always non-null (see above); former null guard was dead code
		int protocolCount = foundProtocolFiles.size();

		// AJAX-style response: three tab-separated counts, no forward
		PrintWriter out = response.getWriter();
		out.print(particleCount + "\t" + reportCount + "\t" + protocolCount);
		return null;
	}
}
package gscrot.processor.watermark;

import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;

import javax.swing.ButtonGroup;
import javax.swing.GroupLayout;
import javax.swing.GroupLayout.Alignment;
import javax.swing.JButton;
import javax.swing.JDialog;
import javax.swing.JLabel;
import javax.swing.JRadioButton;
import javax.swing.JScrollPane;
import javax.swing.JTextField;
import javax.swing.JTextPane;
import javax.swing.LayoutStyle.ComponentPlacement;
import javax.swing.ScrollPaneConstants;

/**
 * Settings dialog for the watermark plugin. Lets the user choose between an
 * image watermark (file picker row) and a text label watermark (text pane
 * with font and color buttons); the two radio buttons share a ButtonGroup so
 * the choices are mutually exclusive. The layout below is GroupLayout
 * builder-chain code (GUI-designer style); the exact call order defines the
 * geometry, so it should be regenerated rather than hand-edited.
 */
@SuppressWarnings("serial")
public class DialogSettings extends JDialog {
	// file-path display for the "Image" option (read-only; filled via Browse)
	private JTextField textField;
	// font/color pickers for the "Label" option
	private JButton btnFont;
	private JButton btnColor;
	// editable watermark text for the "Label" option
	private JTextPane textPane;

	/** Builds and lays out the dialog; does not show it. */
	public DialogSettings() {
		setTitle("Watermark Settings");

		// group makes Image/Label mutually exclusive
		ButtonGroup group = new ButtonGroup();
		JRadioButton rdbtnImage = new JRadioButton("Image");
		group.add(rdbtnImage);
		JLabel lblFile = new JLabel("File:");
		textField = new JTextField();
		textField.setEditable(false);
		textField.setColumns(10);
		JButton btnBrowse = new JButton("Browse");
		JRadioButton rdbtnLabel = new JRadioButton("Label");
		group.add(rdbtnLabel);
		// "Label" is the default watermark mode
		rdbtnLabel.setSelected(true);
		JLabel lblText = new JLabel("Text:");
		JScrollPane scrollPane = new JScrollPane();
		scrollPane.setVerticalScrollBarPolicy(ScrollPaneConstants.VERTICAL_SCROLLBAR_ALWAYS);
		btnFont = new JButton("Font");
		btnFont.addActionListener(new ActionListener() {
			// opens the font chooser and applies the chosen font to the
			// text pane (NOTE(review): applied even on chooser cancel --
			// behavior depends on what JFontChooser returns then; confirm)
			public void actionPerformed(ActionEvent arg0) {
				JFontChooser j = new JFontChooser();
				j.showDialog(DialogSettings.this);
				textPane.setFont(j.getSelectedFont());
			}
		});
		btnColor = new JButton("Color");
		JButton btnOk = new JButton("OK");
		btnOk.addActionListener(new ActionListener() {
			// OK publishes the chosen font to the plugin; it does not close
			// the dialog here
			public void actionPerformed(ActionEvent e) {
				WatermarkPlugin.setFont(textPane.getFont());
			}
		});

		// --- generated GroupLayout code below: call order is significant ---
		GroupLayout groupLayout = new GroupLayout(getContentPane());
		groupLayout.setHorizontalGroup(
			groupLayout.createParallelGroup(Alignment.LEADING)
				.addGroup(groupLayout.createSequentialGroup()
					.addContainerGap()
					.addGroup(groupLayout.createParallelGroup(Alignment.LEADING)
						.addGroup(groupLayout.createSequentialGroup()
							.addComponent(rdbtnLabel)
							.addPreferredGap(ComponentPlacement.UNRELATED)
							.addComponent(lblText)
							.addPreferredGap(ComponentPlacement.UNRELATED)
							.addGroup(groupLayout.createParallelGroup(Alignment.LEADING)
								.addGroup(groupLayout.createSequentialGroup()
									.addComponent(btnFont)
									.addPreferredGap(ComponentPlacement.RELATED)
									.addComponent(btnColor))
								.addComponent(scrollPane, GroupLayout.PREFERRED_SIZE, 239, GroupLayout.PREFERRED_SIZE)))
						.addGroup(groupLayout.createParallelGroup(Alignment.TRAILING)
							.addComponent(btnOk)
							.addGroup(groupLayout.createSequentialGroup()
								.addComponent(rdbtnImage)
								.addPreferredGap(ComponentPlacement.UNRELATED)
								.addComponent(lblFile)
								.addGap(12)
								.addComponent(textField, GroupLayout.PREFERRED_SIZE, 240, GroupLayout.PREFERRED_SIZE)
								.addPreferredGap(ComponentPlacement.RELATED)
								.addComponent(btnBrowse))))
					.addContainerGap(152, Short.MAX_VALUE))
		);
		groupLayout.setVerticalGroup(
			groupLayout.createParallelGroup(Alignment.LEADING)
				.addGroup(groupLayout.createSequentialGroup()
					.addGroup(groupLayout.createParallelGroup(Alignment.LEADING)
						.addGroup(groupLayout.createSequentialGroup()
							.addContainerGap()
							.addGroup(groupLayout.createParallelGroup(Alignment.BASELINE)
								.addComponent(rdbtnImage)
								.addComponent(lblFile)))
						.addGroup(groupLayout.createSequentialGroup()
							.addGap(8)
							.addGroup(groupLayout.createParallelGroup(Alignment.BASELINE)
								.addComponent(textField, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE)
								.addComponent(btnBrowse))))
					.addPreferredGap(ComponentPlacement.UNRELATED)
					.addGroup(groupLayout.createParallelGroup(Alignment.LEADING)
						.addGroup(groupLayout.createParallelGroup(Alignment.BASELINE)
							.addComponent(rdbtnLabel)
							.addComponent(lblText))
						.addComponent(scrollPane, GroupLayout.PREFERRED_SIZE, 81, GroupLayout.PREFERRED_SIZE))
					.addPreferredGap(ComponentPlacement.RELATED)
					.addGroup(groupLayout.createParallelGroup(Alignment.BASELINE)
						.addComponent(btnFont)
						.addComponent(btnColor))
					.addPreferredGap(ComponentPlacement.RELATED, 75, Short.MAX_VALUE)
					.addComponent(btnOk)
					.addContainerGap())
		);
		// text pane is created last and placed inside the scroll pane
		textPane = new JTextPane();
		scrollPane.setViewportView(textPane);
		getContentPane().setLayout(groupLayout);
	}
}
package org.usfirst.frc.team997.robot.commands; import org.usfirst.frc.team997.robot.AHRSWrapper; import org.usfirst.frc.team997.robot.Robot; import edu.wpi.first.wpilibj.PIDController; import edu.wpi.first.wpilibj.PIDOutput; import edu.wpi.first.wpilibj.command.Command; import edu.wpi.first.wpilibj.livewindow.LiveWindow; import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard; public class DriveToAngle extends Command implements PIDOutput { public PIDController controller; private double pidRate; private double setPoint; public DriveToAngle(double angle) { requires(Robot.driveTrain); setPoint = angle; Robot.driveTrain.ahrs.reset(); controller = new PIDController(0.030, 0, 0.020, new AHRSWrapper(), this); controller.setInputRange(-180, 180); controller.setOutputRange(-.3, .3); controller.setAbsoluteTolerance(2.0); controller.setContinuous(true); LiveWindow.addActuator("DriveToAngle", "RotationController", controller); } // Called just before this Command runs the first time protected void initialize() { controller.setSetpoint(setPoint); controller.enable(); SmartDashboard.putBoolean("DriveToAngleOn", true); } // Called repeatedly when this Command is scheduled to run protected void execute() { SmartDashboard.putNumber("DriveToAngle currentRotationRate", pidRate); Robot.driveTrain.driveVoltage(-pidRate, pidRate); } // Make this return true when this Command no longer needs to run execute() protected boolean isFinished() { if(controller.get() == setPoint) { return true; } return false; } // Called once after isFinished returns true protected void end() { controller.disable(); SmartDashboard.putBoolean("DriveToAngleOn", false); } // Called when another command which requires one or more of the same // subsystems is scheduled to run protected void interrupted() { end(); } public void pidWrite(double output) { pidRate = output; } }
package org.vitrivr.cineast.core.setup;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;

import org.vitrivr.adampro.grpc.AdamGrpc.AckMessage;
import org.vitrivr.adampro.grpc.AdamGrpc.AckMessage.Code;
import org.vitrivr.adampro.grpc.AdamGrpc.AttributeDefinitionMessage;
import org.vitrivr.adampro.grpc.AdamGrpc.AttributeType;
import org.vitrivr.adampro.grpc.AdamGrpc.CreateEntityMessage;
import org.vitrivr.cineast.core.data.entities.MediaObjectDescriptor;
import org.vitrivr.cineast.core.data.entities.MediaObjectMetadataDescriptor;
import org.vitrivr.cineast.core.data.entities.MediaSegmentDescriptor;
import org.vitrivr.cineast.core.data.entities.MediaSegmentMetadataDescriptor;
import org.vitrivr.cineast.core.db.adampro.ADAMproWrapper;

import com.google.common.collect.ImmutableMap;

/**
 * {@link EntityCreator} implementation that creates Cineast's entities
 * (multimedia objects, segments, their metadata, and per-feature tables)
 * in the ADAMpro storage engine via its gRPC interface.
 */
public class ADAMproEntityCreator implements EntityCreator {
  /**
   * Wrapper used to send messages to ADAMpro.
   */
  private ADAMproWrapper adampro = new ADAMproWrapper();

  /**
   * Logs the outcome of an entity-creation request and reports success.
   *
   * @param ack ADAMpro acknowledgement to inspect.
   * @param description human-readable name of the entity, used in the log line.
   * @return true if ADAMpro acknowledged the creation with Code.OK.
   */
  private static boolean ackOk(AckMessage ack, String description) {
    if (ack.getCode() == AckMessage.Code.OK) {
      LOGGER.info("Successfully created {}.", description);
    } else {
      LOGGER.error("Error occurred during creation of {}: {}", description, ack.getMessage());
    }
    return ack.getCode() == Code.OK;
  }

  /**
   * Initialises the main entity holding information about multimedia objects in the ADAMpro
   * storage engine. The first two columns (id, name) are indexed; the rest are plain.
   */
  @Override
  public boolean createMultiMediaObjectsEntity() {
    ArrayList<AttributeDefinitionMessage> attributes = new ArrayList<>(4);
    AttributeDefinitionMessage.Builder builder = AttributeDefinitionMessage.newBuilder();
    attributes.add(builder.setName(MediaObjectDescriptor.FIELDNAMES[0]).setAttributetype(AttributeType.STRING).putAllParams(ImmutableMap.of("indexed", "true")).build());
    attributes.add(builder.setName(MediaObjectDescriptor.FIELDNAMES[1]).setAttributetype(AttributeType.INT).putAllParams(ImmutableMap.of("indexed", "true")).build());
    builder.clear(); /* Clear builder to erase the indexed flag. */
    attributes.add(builder.setName(MediaObjectDescriptor.FIELDNAMES[2]).setAttributetype(AttributeType.STRING).build());
    attributes.add(builder.setName(MediaObjectDescriptor.FIELDNAMES[3]).setAttributetype(AttributeType.STRING).build());

    CreateEntityMessage message = CreateEntityMessage.newBuilder().setEntity(MediaObjectDescriptor.ENTITY).addAllAttributes(attributes).build();
    return ackOk(adampro.createEntityBlocking(message), "multimedia object entity");
  }

  /**
   * Initialises the entity responsible for holding metadata information about multimedia objects
   * in an ADAMpro storage. The first three columns are indexed; the value column is plain.
   *
   * @see EntityCreator
   */
  @Override
  public boolean createMetadataEntity() {
    final ArrayList<AttributeDefinitionMessage> fields = new ArrayList<>(4);
    final AttributeDefinitionMessage.Builder builder = AttributeDefinitionMessage.newBuilder();
    fields.add(builder.setName(MediaObjectMetadataDescriptor.FIELDNAMES[0]).setAttributetype(AttributeType.STRING).putAllParams(ImmutableMap.of("indexed", "true")).build());
    fields.add(builder.setName(MediaObjectMetadataDescriptor.FIELDNAMES[1]).setAttributetype(AttributeType.STRING).putAllParams(ImmutableMap.of("indexed", "true")).build());
    fields.add(builder.setName(MediaObjectMetadataDescriptor.FIELDNAMES[2]).setAttributetype(AttributeType.STRING).putAllParams(ImmutableMap.of("indexed", "true")).build());
    builder.clear(); /* Clear builder to erase the indexed flag. */
    fields.add(builder.setName(MediaObjectMetadataDescriptor.FIELDNAMES[3]).setAttributetype(AttributeType.STRING).build());

    final CreateEntityMessage message = CreateEntityMessage.newBuilder().setEntity(MediaObjectMetadataDescriptor.ENTITY).addAllAttributes(fields).build();
    return ackOk(adampro.createEntityBlocking(message), "metadata entity");
  }

  /**
   * Initialises the entity responsible for holding metadata about media segments.
   */
  @Override
  public boolean createSegmentMetadataEntity() {
    final ArrayList<AttributeDefinitionMessage> fields = new ArrayList<>(4);
    final AttributeDefinitionMessage.Builder builder = AttributeDefinitionMessage.newBuilder();
    fields.add(builder.setName(MediaSegmentMetadataDescriptor.FIELDNAMES[0]).setAttributetype(AttributeType.STRING).putAllParams(ImmutableMap.of("indexed", "true")).build());
    fields.add(builder.setName(MediaSegmentMetadataDescriptor.FIELDNAMES[1]).setAttributetype(AttributeType.STRING).putAllParams(ImmutableMap.of("indexed", "true")).build());
    fields.add(builder.setName(MediaSegmentMetadataDescriptor.FIELDNAMES[2]).setAttributetype(AttributeType.STRING).putAllParams(ImmutableMap.of("indexed", "true")).build());
    builder.clear(); /* Clear builder to erase the indexed flag. */
    fields.add(builder.setName(MediaSegmentMetadataDescriptor.FIELDNAMES[3]).setAttributetype(AttributeType.STRING).build());

    final CreateEntityMessage message = CreateEntityMessage.newBuilder().setEntity(MediaSegmentMetadataDescriptor.ENTITY).addAllAttributes(fields).build();
    /* FIX: log messages previously said "metadata entity" (copy-paste from
     * createMetadataEntity), hiding which creation actually failed. */
    return ackOk(adampro.createEntityBlocking(message), "segment metadata entity");
  }

  /**
   * Initialises the entity responsible for holding information about segments of a multimedia
   * object in the ADAMpro storage engine. The id and object-id columns are indexed.
   *
   * @see EntityCreator
   */
  @Override
  public boolean createSegmentEntity() {
    final ArrayList<AttributeDefinitionMessage> fields = new ArrayList<>(7);
    final AttributeDefinitionMessage.Builder builder = AttributeDefinitionMessage.newBuilder();
    fields.add(builder.setName(MediaSegmentDescriptor.FIELDNAMES[0]).setAttributetype(AttributeType.STRING).putAllParams(ImmutableMap.of("indexed", "true")).build());
    fields.add(builder.setName(MediaSegmentDescriptor.FIELDNAMES[1]).setAttributetype(AttributeType.STRING).putAllParams(ImmutableMap.of("indexed", "true")).build());
    builder.clear(); /* Clear builder to erase the indexed flag. */
    fields.add(builder.setName(MediaSegmentDescriptor.FIELDNAMES[2]).setAttributetype(AttributeType.INT).build());
    fields.add(builder.setName(MediaSegmentDescriptor.FIELDNAMES[3]).setAttributetype(AttributeType.INT).build());
    fields.add(builder.setName(MediaSegmentDescriptor.FIELDNAMES[4]).setAttributetype(AttributeType.INT).build());
    fields.add(builder.setName(MediaSegmentDescriptor.FIELDNAMES[5]).setAttributetype(AttributeType.DOUBLE).build());
    fields.add(builder.setName(MediaSegmentDescriptor.FIELDNAMES[6]).setAttributetype(AttributeType.DOUBLE).build());

    final CreateEntityMessage message = CreateEntityMessage.newBuilder().setEntity(MediaSegmentDescriptor.ENTITY).addAllAttributes(fields).build();
    return ackOk(adampro.createEntityBlocking(message), "segment entity");
  }

  /**
   * Creates and initializes a new feature entity with the provided name and the provided attributes. The new entity will have a field
   * called "id", which is of type "string" and has an index. Also, for each of the provided feature attribute a field of the type "vector"
   * will be created.
   *
   * @param featurename Name of the new entity.
   * @param unique Whether or not the provided feature should be unique per id.
   * @param featureAttributes List of the feature names.
   * @return True on success, false otherwise.
   */
  @Override
  public boolean createFeatureEntity(String featurename, boolean unique, String... featureAttributes) {
    final AttributeDefinition[] attributes = Arrays.stream(featureAttributes)
        .map(s -> new AttributeDefinition(s, AttributeDefinition.AttributeType.VECTOR))
        .toArray(AttributeDefinition[]::new);
    return this.createFeatureEntity(featurename, unique, attributes);
  }

  /**
   * Creates and initializes a new feature entity with the provided name and the provided attributes. The new entity will have a field
   * called "id", which is of type "string" and has an index.
   *
   * @param featurename Name of the new entity.
   * @param unique Whether or not the provided feature should be unique per id.
   *               NOTE(review): currently unused by this implementation — confirm intent.
   * @param attributes List of {@link AttributeDefinition} objects specifying the new entities attributes.
   * @return True on success, false otherwise.
   */
  @Override
  public boolean createFeatureEntity(String featurename, boolean unique, AttributeDefinition... attributes) {
    final AttributeDefinition[] extended = new AttributeDefinition[attributes.length + 1];
    final HashMap<String,String> hints = new HashMap<>(1);
    hints.put("indexed", "true");
    String handler = "cassandra";
    for (AttributeDefinition def : attributes) {
      /* FIX: the previous check compared the local AttributeDefinition.AttributeType
       * against the imported gRPC AttributeType enum, which is always false, so a
       * "handler" hint on a vector attribute was silently ignored. */
      if (def.getType() == AttributeDefinition.AttributeType.VECTOR && def.hasHint("handler")) {
        handler = def.getHint("handler").get();
        break;
      }
    }
    hints.put("handler", handler);
    extended[0] = new AttributeDefinition("id", AttributeDefinition.AttributeType.STRING, hints);
    System.arraycopy(attributes, 0, extended, 1, attributes.length);
    return this.createEntity(featurename, extended);
  }

  /**
   * Creates and initializes an entity with the provided name and the provided attributes. The new entity will have an additional field
   * prepended called "id", which is of type "string" and has an index.
   *
   * @param entityName Name of the new entity.
   * @param attributes List of {@link AttributeDefinition} objects specifying the new entities attributes.
   * @return True on success, false otherwise.
   */
  @Override
  public boolean createIdEntity(String entityName, AttributeDefinition... attributes) {
    final AttributeDefinition[] extended = new AttributeDefinition[attributes.length + 1];
    final HashMap<String,String> hints = new HashMap<>(1);
    hints.put("indexed", "true");
    extended[0] = new AttributeDefinition("id", AttributeDefinition.AttributeType.STRING, hints);
    System.arraycopy(attributes, 0, extended, 1, attributes.length);
    return this.createEntity(entityName, extended);
  }

  /**
   * Creates and initializes an entity with the provided name and the provided attributes.
   *
   * @param entityName Name of the new entity (lower-cased before submission).
   * @param attributes List of {@link AttributeDefinition} objects specifying the new entities attributes.
   * @return True on success, false otherwise.
   */
  @Override
  public boolean createEntity(String entityName, AttributeDefinition... attributes) {
    final ArrayList<AttributeDefinitionMessage> fieldList = new ArrayList<>();
    final AttributeDefinitionMessage.Builder builder = AttributeDefinitionMessage.newBuilder();
    for (AttributeDefinition attribute : attributes) {
      builder.setName(attribute.getName()).setAttributetype(mapAttributeType(attribute.getType()));
      attribute.ifHintPresent("handler", builder::setHandler);
      attribute.ifHintPresent("indexed", h -> builder.putAllParams(ImmutableMap.of("indexed", h)));
      fieldList.add(builder.build());
      builder.clear(); /* builder is reused: reset between attributes */
    }

    final CreateEntityMessage message = CreateEntityMessage.newBuilder().setEntity(entityName.toLowerCase()).addAllAttributes(fieldList).build();
    final AckMessage ack = adampro.createEntityBlocking(message);
    if (ack.getCode() == AckMessage.Code.OK) {
      LOGGER.info("Successfully created entity '{}'", entityName);
    } else {
      LOGGER.error("Error while creating entity {}: '{}'", entityName, ack.getMessage());
    }
    return ack.getCode() == Code.OK;
  }

  /* (non-Javadoc)
   * @see org.vitrivr.cineast.core.setup.IEntityCreator#existsEntity(java.lang.String)
   */
  @Override
  public boolean existsEntity(String entityName) {
    return this.adampro.existsEntityBlocking(entityName);
  }

  @Override
  public boolean dropEntity(String entityName) {
    return this.adampro.dropEntityBlocking(entityName);
  }

  /* (non-Javadoc)
   * @see org.vitrivr.cineast.core.setup.IEntityCreator#close()
   */
  @Override
  public void close() {
    this.adampro.close();
  }

  /**
   * Maps Cineast's attribute types onto the corresponding ADAMpro gRPC enum values.
   * ("UNKOWNAT" is the spelling used by the generated gRPC enum.)
   */
  public static final AttributeType mapAttributeType(org.vitrivr.cineast.core.setup.AttributeDefinition.AttributeType type) {
    switch (type) {
      case AUTO:
        return AttributeType.AUTO;
      case BOOLEAN:
        return AttributeType.BOOLEAN;
      case DOUBLE:
        return AttributeType.DOUBLE;
      case VECTOR:
        return AttributeType.VECTOR;
      case FLOAT:
        return AttributeType.FLOAT;
      case GEOGRAPHY:
        return AttributeType.GEOGRAPHY;
      case GEOMETRY:
        return AttributeType.GEOMETRY;
      case INT:
        return AttributeType.INT;
      case LONG:
        return AttributeType.LONG;
      case STRING:
        return AttributeType.STRING;
      case TEXT:
        return AttributeType.TEXT;
      default:
        return AttributeType.UNKOWNAT;
    }
  }
}
package jade.core.messaging;

import java.util.Date;

import jade.core.ServiceFinder;
import jade.core.HorizontalCommand;
import jade.core.VerticalCommand;
import jade.core.GenericCommand;
import jade.core.Service;
import jade.core.BaseService;
import jade.core.ServiceException;
import jade.core.Sink;
import jade.core.Filter;
import jade.core.Node;
import jade.core.AgentContainer;
import jade.core.MainContainer;
import jade.core.CaseInsensitiveString;
import jade.core.AID;
import jade.core.ContainerID;
import jade.core.Profile;
import jade.core.ProfileException;
import jade.core.IMTPException;
import jade.core.NotFoundException;
import jade.core.UnreachableException;

import jade.domain.FIPAAgentManagement.InternalError;

import jade.security.Authority;
import jade.security.AgentPrincipal;
import jade.security.PrivilegedExceptionAction;
import jade.security.AuthException;

import jade.lang.acl.ACLMessage;

import jade.mtp.MTPDescriptor;
import jade.mtp.MTPException;

import jade.util.leap.Iterator;

/**
   A minimal version of the JADE service to manage the message passing
   subsystem installed on the platform. This class just supports direct
   ACL message delivery, and relies on another one for any other feature
   (such as message routing and MTP management).

   @author Giovanni Rimassa - FRAMeTech s.r.l.
*/
public class LightMessagingService extends BaseService implements MessageManager.Channel {

  /** Name of the slice hosted by the platform Main Container. */
  public static final String MAIN_SLICE = "Main-Container";

  // The vertical commands this service declares ownership of.
  private static final String[] OWNED_COMMANDS = new String[] {
    MessagingSlice.SEND_MESSAGE,
    MessagingSlice.INSTALL_MTP,
    MessagingSlice.UNINSTALL_MTP,
    MessagingSlice.SET_PLATFORM_ADDRESSES
  };

  /**
     Creates a new light messaging service attached to the given container.

     @param ac The concrete agent container this service runs on.
     @param p The profile holding the configuration parameters.
     @throws ProfileException If a required configuration parameter cannot be read.
  */
  public LightMessagingService(AgentContainer ac, Profile p) throws ProfileException {
    super(p);
    myContainer = ac;

    // Initialize its own ID
    // String platformID = myContainer.getPlatformID();

    myMessageManager = MessageManager.instance(p);

    // Name of the remote slice used as a routing helper
    // (defaults to the Main Container slice).
    String helperSliceName = p.getParameter("accRouter", MAIN_SLICE);

    // Create a local slice
    localSlice = new ServiceComponent(helperSliceName);
  }

  /** Returns the canonical name of the messaging service. */
  public String getName() {
    return MessagingSlice.NAME;
  }

  /** Returns the horizontal (slice-to-slice) interface, or null if it cannot be loaded. */
  public Class getHorizontalInterface() {
    try {
      return Class.forName(MessagingSlice.NAME + "Slice");
    }
    catch(ClassNotFoundException cnfe) {
      return null;
    }
  }

  /** Returns the slice of this service installed on the local node. */
  public Service.Slice getLocalSlice() {
    return localSlice;
  }

  // The local slice doubles as the outgoing command filter;
  // no incoming filter is installed by this service.
  public Filter getCommandFilter(boolean direction) {
    if(direction == Filter.OUTGOING) {
      return localSlice;
    }
    else {
      return null;
    }
  }

  // This service installs no command sinks.
  public Sink getCommandSink(boolean side) {
    return null;
  }

  /** Returns the vertical commands owned by this service. */
  public String[] getOwnedCommands() {
    return OWNED_COMMANDS;
  }

  /**
     Inner mix-in class for this service: this class receives
     commands through its <code>Filter</code> interface and serves
     them, coordinating with remote parts of this service through
     the <code>Slice</code> interface (that extends the
     <code>Service.Slice</code> interface).
  */
  private class ServiceComponent implements Filter, MessagingSlice {

    /**
       Builds a new messaging service lightweight component, relying
       on a remote slice for most operations.

       @param helperName The name of the remote slice used as a helper.
    **/
    public ServiceComponent(String helperName) {
      myHelperName = helperName;
    }

    // Entry point for the ACL message dispatching process
    public void deliverNow(ACLMessage msg, AID receiverID) throws UnreachableException, NotFoundException {
      try {
        // Lazily resolve the helper slice on first use.
        if(myHelper == null) {
          myHelper = (MessagingSlice)getSlice(myHelperName);
        }
        deliverUntilOK(msg, receiverID);
      }
      catch(IMTPException imtpe) {
        throw new UnreachableException("Unreachable network node", imtpe);
      }
      catch(ServiceException se) {
        throw new UnreachableException("Unreachable service slice:", se);
      }
    }

    // Repeatedly looks up the receiver's current container through the main
    // slice and retries the dispatch until it succeeds: the target agent may
    // be moving between containers while we deliver.
    private void deliverUntilOK(ACLMessage msg, AID receiverID) throws IMTPException, NotFoundException, ServiceException {
      boolean ok = false;
      do {
        MessagingSlice mainSlice = (MessagingSlice)getSlice(MAIN_SLICE);
        ContainerID cid = mainSlice.getAgentLocation(receiverID);

        MessagingSlice targetSlice = (MessagingSlice)getSlice(cid.getName());
        try {
          targetSlice.dispatchLocally(msg, receiverID);
          ok = true;
        }
        catch(NotFoundException nfe) {
          // Stale proxy again, maybe the receiver is running around. Try again...
          ok = false;
        }
      } while(!ok);
    }

    // Implementation of the Filter interface

    // Serves the vertical commands owned by this service.
    public void accept(VerticalCommand cmd) { // FIXME: Should set the exception somehow...
      try {
        String name = cmd.getName();
        if(name.equals(SEND_MESSAGE)) {
          handleSendMessage(cmd);
        }
        // NOTE(review): this 'if' is not chained with 'else' like the branches
        // below; harmless since command names are distinct, but inconsistent.
        if(name.equals(INSTALL_MTP)) {
          Object result = handleInstallMTP(cmd);
          cmd.setReturnValue(result);
        }
        else if(name.equals(UNINSTALL_MTP)) {
          handleUninstallMTP(cmd);
        }
        else if(name.equals(SET_PLATFORM_ADDRESSES)) {
          handleSetPlatformAddresses(cmd);
        }
      }
      catch(AuthException ae) {
        // Security failures are reported back through the command object.
        cmd.setReturnValue(ae);
      }
      catch(IMTPException imtpe) {
        imtpe.printStackTrace();
      }
      catch(NotFoundException nfe) {
        nfe.printStackTrace();
      }
      catch(ServiceException se) {
        se.printStackTrace();
      }
      catch(MTPException mtpe) {
        mtpe.printStackTrace();
      }
    }

    public void setBlocking(boolean newState) {
      // Do nothing. Blocking and Skipping not supported
    }

    public boolean isBlocking() {
      return false; // Blocking and Skipping not implemented
    }

    public void setSkipping(boolean newState) {
      // Do nothing. Blocking and Skipping not supported
    }

    public boolean isSkipping() {
      return false; // Blocking and Skipping not implemented
    }

    // Implementation of the Service.Slice interface

    public Service getService() {
      return LightMessagingService.this;
    }

    public Node getNode() throws ServiceException {
      try {
        return LightMessagingService.this.getLocalNode();
      }
      catch(IMTPException imtpe) {
        throw new ServiceException("Problem in contacting the IMTP Manager", imtpe);
      }
    }

    // Demultiplexes an incoming horizontal command to the matching
    // service-specific method, packing any result (or error) back into
    // the command object.
    public VerticalCommand serve(HorizontalCommand cmd) {
      try {
        String cmdName = cmd.getName();
        Object[] params = cmd.getParams();

        if(cmdName.equals(H_DISPATCHLOCALLY)) {
          ACLMessage msg = (ACLMessage)params[0];
          AID receiverID = (AID)params[1];

          dispatchLocally(msg, receiverID);
        }
        else if(cmdName.equals(H_ROUTEOUT)) {
          ACLMessage msg = (ACLMessage)params[0];
          AID receiverID = (AID)params[1];
          String address = (String)params[2];

          routeOut(msg, receiverID, address);
        }
        else if(cmdName.equals(H_GETAGENTLOCATION)) {
          AID agentID = (AID)params[0];

          cmd.setReturnValue(getAgentLocation(agentID));
        }
        else if(cmdName.equals(H_INSTALLMTP)) {
          String address = (String)params[0];
          String className = (String)params[1];

          cmd.setReturnValue(installMTP(address, className));
        }
        else if(cmdName.equals(H_UNINSTALLMTP)) {
          String address = (String)params[0];

          uninstallMTP(address);
        }
        else if(cmdName.equals(H_NEWMTP)) {
          MTPDescriptor mtp = (MTPDescriptor)params[0];
          ContainerID cid = (ContainerID)params[1];

          newMTP(mtp, cid);
        }
        else if(cmdName.equals(H_DEADMTP)) {
          MTPDescriptor mtp = (MTPDescriptor)params[0];
          ContainerID cid = (ContainerID)params[1];

          deadMTP(mtp, cid);
        }
        else if(cmdName.equals(H_ADDROUTE)) {
          MTPDescriptor mtp = (MTPDescriptor)params[0];
          String sliceName = (String)params[1];

          addRoute(mtp, sliceName);
        }
        else if(cmdName.equals(H_REMOVEROUTE)) {
          MTPDescriptor mtp = (MTPDescriptor)params[0];
          String sliceName = (String)params[1];

          removeRoute(mtp, sliceName);
        }
      }
      catch(Throwable t) {
        // Any error is handed back to the remote caller through the command.
        cmd.setReturnValue(t);
      }
      finally {
        // NOTE(review): returning from a 'finally' block is discouraged; it is
        // benign here only because the 'catch' above swallows every Throwable.
        if(cmd instanceof VerticalCommand) {
          return (VerticalCommand)cmd;
        }
        else {
          return null;
        }
      }
    }

    // Implementation of the service-specific horizontal interface MessagingSlice

    // Posts the message to an agent living on this container, failing if absent.
    public void dispatchLocally(ACLMessage msg, AID receiverID) throws IMTPException, NotFoundException {
      boolean found = myContainer.postMessageToLocalAgent(msg, receiverID);
      if(!found) {
        throw new NotFoundException("Messaging service slice failed to find " + receiverID);
      }
    }

    // Routes the message towards an external address through the helper slice.
    public void routeOut(ACLMessage msg, AID receiverID, String address) throws IMTPException, MTPException {
      try {
        // Lazily resolve the helper slice on first use.
        if(myHelper == null) {
          myHelper = (MessagingSlice)getSlice(myHelperName);
        }
        myHelper.routeOut(msg, receiverID, address);
      }
      catch(ServiceException se) {
        throw new MTPException("No suitable route found for address " + address + ".");
      }
    }

    // Location lookup requires a full messaging slice; not supported here.
    public ContainerID getAgentLocation(AID agentID) throws IMTPException, NotFoundException {
      throw new NotFoundException("Agent location lookup not supported by this slice");
    }

    public MTPDescriptor installMTP(String address, String className) throws IMTPException, ServiceException, MTPException {
      throw new MTPException("Installing MTPs is not supported by this slice");
    }

    public void uninstallMTP(String address) throws IMTPException, ServiceException, NotFoundException, MTPException {
      throw new MTPException("Uninstalling MTPs is not supported by this slice");
    }

    public void newMTP(MTPDescriptor mtp, ContainerID cid) throws IMTPException, ServiceException {
      // Do nothing
    }

    public void deadMTP(MTPDescriptor mtp, ContainerID cid) throws IMTPException, ServiceException {
      // Do nothing
    }

    public void addRoute(MTPDescriptor mtp, String sliceName) throws IMTPException, ServiceException {
      // Do nothing
    }

    public void removeRoute(MTPDescriptor mtp, String sliceName) throws IMTPException, ServiceException {
      // Do nothing
    }

    // Name of the remote helper slice used for routing.
    private String myHelperName;
    // Lazily-resolved reference to the remote helper slice.
    private MessagingSlice myHelper;

  } // End of ServiceComponent class

  /**
     Activates the ACL codecs and MTPs as specified in the given
     <code>Profile</code> instance.

     @param myProfile The <code>Profile</code> instance containing the
     list of ACL codecs and MTPs to activate on this node.
  **/
  public void activateProfile(Profile myProfile) {
    // Do nothing
  }

  // Delivers a message to its receiver: directly when the receiver lives on
  // this container, otherwise through the ACC using the receiver's addresses.
  public void deliverNow(ACLMessage msg, AID receiverID) throws UnreachableException {
    try {
      if(myContainer.livesHere(receiverID)) {
        localSlice.deliverNow(msg, receiverID);
      }
      else {
        // Dispatch it through the ACC
        Iterator addresses = receiverID.getAllAddresses();
        while(addresses.hasNext()) {
          String address = (String)addresses.next();
          try {
            forwardMessage(msg, receiverID, address);
            return;
          }
          catch(MTPException mtpe) {
            System.out.println("Bad address [" + address + "]: trying the next one...");
          }
        }
        // Every address failed --> notify the sender.
        notifyFailureToSender(msg, receiverID, new InternalError("No valid address contained within the AID " + receiverID.getName()));
      }
    }
    catch(NotFoundException nfe) {
      // The receiver does not exist --> Send a FAILURE message
      notifyFailureToSender(msg, receiverID, new InternalError("Agent not found: " + nfe.getMessage()));
    }
  }

  // Hands the message over to the local slice for routing towards the given address.
  private void forwardMessage(ACLMessage msg, AID receiver, String address) throws MTPException {
    try {
      localSlice.routeOut(msg, receiver, address);
    }
    catch(IMTPException imtpe) {
      throw new MTPException("Error during message routing", imtpe);
    }
  }

  /**
   * This method is used internally by the platform in order
   * to notify the sender of a message that a failure was reported by
   * the Message Transport Service.
   * Package scoped as it can be called by the MessageManager
   */
  public void notifyFailureToSender(ACLMessage msg, AID receiver, InternalError ie) {
    // Sanity check to avoid infinite loops: do nothing when the sender is
    // unknown, or when this is already a FAILURE coming from the AMS.
    if ( (msg.getSender()==null) || ((msg.getSender().equals(myContainer.getAMS())) && (msg.getPerformative()==ACLMessage.FAILURE)))
      return;

    // else send back a failure message
    final ACLMessage failure = msg.createReply();
    failure.setPerformative(ACLMessage.FAILURE);
    //System.err.println(failure.toString());
    final AID theAMS = myContainer.getAMS();
    failure.setSender(theAMS);

    // FIXME: the content is not completely correct, but that should
    // also avoid creating wrong content
    // FIXME: the content should include the indication about the
    // receiver to which dispatching failed.
    String content = "( (action " + msg.getSender().toString();
    content = content + " ACLMessage ) " + ie.getMessage() + ")";
    failure.setContent(content);

    try {
      // The failure notification is sent with the AMS's privileges.
      Authority authority = myContainer.getAuthority();
      authority.doPrivileged(new PrivilegedExceptionAction() {
        public Object run() {
          try {
            // FIXME: Having a custom code path for send failure notifications would be better...
            GenericCommand cmd = new GenericCommand(MessagingSlice.SEND_MESSAGE, MessagingSlice.NAME, null);
            cmd.addParam(failure);
            cmd.addParam(theAMS);
            handleSendMessage(cmd);
          }
          catch (AuthException ae) {
            // it never happens if the policy file gives
            System.out.println( ae.getMessage() );
          }
          return null; // nothing to return
        }
      });
    }
    catch(Exception e) {
      // should be never thrown
      e.printStackTrace();
    }
  }

  // Vertical command handler methods

  // Serves the SEND_MESSAGE vertical command: performs the security checks
  // and delivers a copy of the message to every intended receiver.
  private void handleSendMessage(VerticalCommand cmd) throws AuthException {
    Object[] params = cmd.getParams();
    ACLMessage msg = (ACLMessage)params[0];
    AID sender = (AID)params[1];

    // Set the sender unless already set
    try {
      if (msg.getSender() == null)
        msg.setSender(sender);
    }
    catch (NullPointerException e) {
      msg.setSender(sender);
    }

    // Check that the principal behind the command may send as this sender.
    AgentPrincipal target1 = myContainer.getAgentPrincipal(msg.getSender());
    Authority authority = myContainer.getAuthority();
    authority.checkAction(Authority.AGENT_SEND_AS, target1, null);

    AuthException lastException = null;

    // 26-Mar-2001. The receivers set into the Envelope of the message,
    // if present, must have precedence over those set into the ACLMessage.
    // If no :intended-receiver parameter is present in the Envelope,
    // then the :to parameter
    // is used to generate :intended-receiver field.
    //
    // create an Iterator with all the receivers to which the message must be
    // delivered
    Iterator it = msg.getAllIntendedReceiver();

    while (it.hasNext()) {
      AID dest = (AID)it.next();
      try {
        AgentPrincipal target2 = myContainer.getAgentPrincipal(dest);
        authority.checkAction(Authority.AGENT_SEND_TO, target2, null);
        // Each receiver gets its own copy of the message.
        ACLMessage copy = (ACLMessage)msg.clone();

        boolean found = myContainer.postMessageToLocalAgent(copy, dest);
        if(!found) {
          // Not a local agent: hand the copy to the asynchronous delivery machinery.
          myMessageManager.deliver(copy, dest, this);
        }
      }
      catch (AuthException ae) {
        // Remember the failure, notify the sender, and keep serving the
        // remaining receivers.
        lastException = ae;
        notifyFailureToSender(msg, dest, new InternalError(ae.getMessage()));
      }
    }

    if(lastException != null)
      throw lastException;
  }

  // Forwards an MTP installation request to the slice on the target container.
  private MTPDescriptor handleInstallMTP(VerticalCommand cmd) throws IMTPException, ServiceException, NotFoundException, MTPException {
    Object[] params = cmd.getParams();
    String address = (String)params[0];
    ContainerID cid = (ContainerID)params[1];
    String className = (String)params[2];

    MessagingSlice targetSlice = (MessagingSlice)getSlice(cid.getName());
    return targetSlice.installMTP(address, className);
  }

  // Forwards an MTP removal request to the slice on the target container.
  private void handleUninstallMTP(VerticalCommand cmd) throws IMTPException, ServiceException, NotFoundException, MTPException {
    Object[] params = cmd.getParams();
    String address = (String)params[0];
    ContainerID cid = (ContainerID)params[1];

    MessagingSlice targetSlice = (MessagingSlice)getSlice(cid.getName());
    targetSlice.uninstallMTP(address);
  }

  private void handleSetPlatformAddresses(VerticalCommand cmd) {
    // Do nothing...
  }

  // The concrete agent container, providing access to LADT, etc.
  private final AgentContainer myContainer;

  // The local slice for this service
  private final ServiceComponent localSlice;

  // The component managing asynchronous message delivery and retries
  private final MessageManager myMessageManager;

}
package org.xins.server; import java.lang.reflect.Method; import java.lang.reflect.Modifier; import java.util.ArrayList; import java.util.Enumeration; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.TimeZone; import javax.servlet.ServletRequest; import org.apache.log4j.Logger; import org.xins.types.Type; import org.xins.types.TypeValueException; import org.xins.types.standard.Text; import org.xins.util.MandatoryArgumentChecker; import org.xins.util.collections.BasicPropertyReader; import org.xins.util.collections.PropertyReader; import org.xins.util.collections.PropertiesPropertyReader; import org.xins.util.collections.expiry.ExpiryFolder; import org.xins.util.collections.expiry.ExpiryStrategy; import org.xins.util.io.FastStringWriter; import org.xins.util.manageable.BootstrapException; import org.xins.util.manageable.DeinitializationException; import org.xins.util.manageable.InitializationException; import org.xins.util.manageable.InvalidPropertyValueException; import org.xins.util.manageable.Manageable; import org.xins.util.manageable.MissingRequiredPropertyException; import org.xins.util.text.DateConverter; import org.xins.util.text.FastStringBuffer; import org.znerd.xmlenc.XMLOutputter; /** * Base class for API implementation classes. * * @version $Revision$ $Date$ * @author Ernst de Haan (<a href="mailto:znerd@FreeBSD.org">znerd@FreeBSD.org</a>) */ public abstract class API extends Manageable implements DefaultResultCodes { // Class fields /** * String returned by the function <code>_GetStatistics</code> when certain * information is not available. */ private static final String NOT_AVAILABLE = "N/A"; /** * Successful empty call result. */ private static final CallResult SUCCESSFUL_RESULT = new BasicCallResult(true, null, null, null); /** * Call result to be returned when the function name is missing in the * request. 
*/ private static final CallResult MISSING_FUNCTION_NAME_RESULT = new BasicCallResult(false, "MissingFunctionName", null, null); /** * Call result to be returned when the function name does not denote an * existing function. */ private static final CallResult NO_SUCH_FUNCTION_RESULT = new BasicCallResult(false, "NoSuchFunction", null, null); // Class functions // Constructors protected API(String name) throws IllegalArgumentException { // Check preconditions MandatoryArgumentChecker.check("name", name); if (name.length() < 1) { throw new IllegalArgumentException("name.length() (" + name.length() + " < 1"); } // Initialize fields _name = name; _startupTimestamp = System.currentTimeMillis(); _manageableObjects = new ArrayList(); _functionsByName = new HashMap(); _functionList = new ArrayList(); _resultCodesByName = new HashMap(); _resultCodeList = new ArrayList(); } // Fields /** * The name of this API. Cannot be <code>null</code> and cannot be an empty * string. */ private final String _name; /** * Flag that indicates if this API is session-based. */ private boolean _sessionBased; /** * Flag that indicates if response validations should be enabled for the * functions in this API. */ private boolean _responseValidationEnabled; /** * List of registered manageable objects. See {@link #add(Manageable)}. * * <p />This field is initialized to a non-<code>null</code> value by the * constructor. */ private final List _manageableObjects; /** * Expiry strategy for <code>_sessionsByID</code>. * * <p />For session-based APIs, this field is initialized to a * non-<code>null</code> value by the initialization method * {@link #init(PropertyReader)}. */ private ExpiryStrategy _sessionExpiryStrategy; /** * Collection that maps session identifiers to <code>Session</code> * instances. Contains all sessions associated with this API. * * <p />For session-based APIs, this field is initialized to a * non-<code>null</code> value by the initialization method * {@link #init(PropertyReader)}. 
*/ private ExpiryFolder _sessionsByID; /** * Map that maps function names to <code>Function</code> instances. * Contains all functions associated with this API. * * <p />This field is initialized to a non-<code>null</code> value by the * constructor. */ private final Map _functionsByName; /** * List of all functions. This field cannot be <code>null</code>. */ private final List _functionList; /** * Map that maps result code names to <code>ResultCode</code> instances. * Contains all result codes associated with this API. * * <p />This field is initialized to a non-<code>null</code> value by the * constructor. */ private final Map _resultCodesByName; /** * List of all result codes. This field cannot be <code>null</code>. */ private final List _resultCodeList; /** * The build-time settings. This field is initialized exactly once by * {@link #bootstrap(PropertyReader)}. It can be <code>null</code> before * that. */ private PropertyReader _buildSettings; /** * The runtime-time settings. This field is initialized by * {@link #init(PropertyReader)}. It can be <code>null</code> before that. */ private PropertyReader _runtimeSettings; /** * The name of the default function. Is <code>null</code> if there is no * default function. */ private String _defaultFunction; /** * The type that applies for session identifiers. For session-based APIs * this will be set in {@link #init(PropertyReader)}. */ private SessionIDType _sessionIDType; /** * The session ID generator. For session-based APIs this will be set in * {@link #init(PropertyReader)}. */ private SessionIDType.Generator _sessionIDGenerator; /** * Flag that indicates if the shutdown sequence has been initiated. */ private boolean _shutDown; // TODO: Use a state for this /** * Timestamp indicating when this API instance was created. */ private final long _startupTimestamp; /** * Deployment identifier. */ private String _deployment; /** * Host name for the machine that was used for this build. 
*/ private String _buildHost; /** * Time stamp that indicates when this build was done. */ private String _buildTime; /** * XINS version used to build the web application package. */ private String _buildVersion; /** * The time zone used when generating dates for output. */ private TimeZone _timeZone; // Methods private final boolean getBooleanProperty(PropertyReader properties, String propertyName, boolean fallbackDefault) throws IllegalArgumentException, InvalidPropertyValueException { // Check preconditions MandatoryArgumentChecker.check("properties", properties, "propertyName", propertyName); String value = properties.get(propertyName); if (value == null || value.length() == 0) { return fallbackDefault; } if ("true".equals(value)) { return true; } else if ("false".equals(value)) { return false; } else { throw new InvalidPropertyValueException(propertyName, value); } } private final int getIntProperty(PropertyReader properties, String propertyName) throws IllegalArgumentException, NumberFormatException { // Check preconditions MandatoryArgumentChecker.check("properties", properties, "propertyName", propertyName); String value = properties.get(propertyName); return Integer.parseInt(value); } /** * Gets the name of this API. * * @return * the name of this API, never <code>null</code> and never an empty * string. */ public final String getName() { return _name; } /** * Gets the timestamp that indicates when this <code>API</code> instance * was created. * * @return * the time this instance was constructed, as a number of milliseconds * since midnight January 1, 1970. */ public final long getStartupTimestamp() { return _startupTimestamp; } /** * Returns the applicable time zone. * * @return * the time zone, not <code>null</code>. * * @since XINS 0.95 */ public final TimeZone getTimeZone() { return _timeZone; } public final int getCurrentSessions() throws IllegalStateException { // Check preconditions if (! 
_sessionBased) { throw new IllegalStateException("This API is not session-based."); } return _sessionsByID.size(); } /** * Checks if response validation is enabled. * * @return * <code>true</code> if response validation is enabled, * <code>false</code> otherwise. * * @since XINS 0.98 */ public final boolean isResponseValidationEnabled() { return _responseValidationEnabled; } protected final void bootstrapImpl(PropertyReader buildSettings) throws MissingRequiredPropertyException, InvalidPropertyValueException, BootstrapException { // Log the time zone // TODO: Why log the time zone? _timeZone = TimeZone.getDefault(); String tzLongName = _timeZone.getDisplayName(false, TimeZone.LONG); String tzShortName = _timeZone.getDisplayName(false, TimeZone.SHORT); if (tzLongName.equals(tzShortName)) { Library.BOOTSTRAP_LOG.info("Local time zone is " + tzLongName + '.'); } else { Library.BOOTSTRAP_LOG.info("Local time zone is " + tzShortName + " (" + tzLongName + ")."); } // Store the build-time settings _buildSettings = buildSettings; // Check if a default function is set _defaultFunction = _buildSettings.get("org.xins.api.defaultFunction"); if (_defaultFunction != null) { Library.BOOTSTRAP_LOG.debug("Default function set to \"" + _defaultFunction + "\"."); } // TODO: Check that default function exists. If not, set state // accordingly. // Check if this API is session-based _sessionBased = getBooleanProperty(buildSettings, "org.xins.api.sessionBased", false); Library.BOOTSTRAP_LOG.info("API is " + (_responseValidationEnabled ? "" : "not ") + "session-oriented."); // XXX: Allow configuration of session ID type ? 
// Initialize session-based API if (_sessionBased) { Library.BOOTSTRAP_LOG.debug("Performing session-related initialization."); // Initialize session ID type _sessionIDType = new BasicSessionIDType(this); _sessionIDGenerator = _sessionIDType.getGenerator(); // Determine session time-out duration and precision final long MINUTE_IN_MS = 60000L; long timeOut = MINUTE_IN_MS * (long) getIntProperty(buildSettings, "org.xins.api.sessionTimeOut"); long precision = MINUTE_IN_MS * (long) getIntProperty(buildSettings, "org.xins.api.sessionTimeOutPrecision"); // Create expiry strategy and folder _sessionExpiryStrategy = new ExpiryStrategy(timeOut, precision); _sessionsByID = new ExpiryFolder("sessionsByID", // name of folder (for logging) _sessionExpiryStrategy, // expiry strategy false, // strict thread sync checking? (TODO) 5000L); // max queue wait time in ms (TODO) } // Get build-time properties _deployment = _buildSettings.get("org.xins.api.deployment"); _buildHost = _buildSettings.get("org.xins.api.build.host"); _buildTime = _buildSettings.get("org.xins.api.build.time"); _buildVersion = _buildSettings.get("org.xins.api.build.version"); // Log build-time properties Logger log = Library.BOOTSTRAP_LOG; FastStringBuffer buffer = new FastStringBuffer(160); // - build host name buffer.append("Built on "); if (_buildHost != null && !("".equals(_buildHost))) { buffer.append("host "); buffer.append(_buildHost); } else { log.warn("Build host name is not set."); buffer.append("unknown host"); _buildHost = null; } // - build time if (_buildTime != null && !("".equals(_buildTime))) { buffer.append(" (at "); buffer.append(_buildTime); buffer.append(")"); } else { log.warn("Build time stamp is not set."); _buildTime = null; } // - deployment if (_deployment != null && !("".equals(_deployment))) { buffer.append(", for deployment \""); buffer.append(_deployment); buffer.append('"'); } else { _deployment = null; } // - XINS version if (_buildVersion != null && !("".equals(_buildVersion))) 
{ buffer.append(", using XINS "); buffer.append(_buildVersion); } else { log.warn("Build version is not set."); _buildVersion = null; } buffer.append('.'); log.info(buffer.toString()); // Let the subclass perform initialization bootstrapImpl2(buildSettings); // Bootstrap all instances int count = _manageableObjects.size(); for (int i = 0; i < count; i++) { Manageable m = (Manageable) _manageableObjects.get(i); String className = m.getClass().getName(); log.debug("Bootstrapping manageable object of class " + className + " for " + _name + " API."); try { m.bootstrap(_buildSettings); log.info("Bootstrapped manageable object of class " + className + " for " + _name + " API."); } catch (Throwable exception) { String exMessage = exception.getMessage(); buffer.clear(); buffer.append("Failed to bootstrap manageable object of class "); buffer.append(className); buffer.append(" for "); buffer.append(_name); buffer.append(" API due to "); buffer.append(exception.getClass().getName()); if (exMessage == null || exMessage.length() < 1) { buffer.append('.'); } else { buffer.append(" with message \""); buffer.append(exMessage); buffer.append("\"."); } String message = buffer.toString(); log.error(message, exception); throw new BootstrapException(message); } } } /** * Bootstraps this API (implementation method). * * <p />The implementation of this method in class {@link API} is empty. * Custom subclasses can perform any necessary bootstrapping in this * class. * * <p />Note that bootstrapping and initialization are different. Bootstrap * includes only the one-time configuration of the API based on the * build-time settings, while the initialization * * <p />The {@link #add(Manageable)} may be called from this method, * and from this method <em>only</em>. * * @param buildSettings * the build-time properties, guaranteed not to be <code>null</code>. * * @throws MissingRequiredPropertyException * if a required property is not given. 
* * @throws InvalidPropertyValueException * if a property has an invalid value. * * @throws BootstrapException * if the initialization fails. */ protected void bootstrapImpl2(PropertyReader buildSettings) throws MissingRequiredPropertyException, InvalidPropertyValueException, BootstrapException { // empty } /** * Initializes this API. * * @param runtimeSettings * the runtime configuration settings, cannot be <code>null</code>. * * @throws InvalidPropertyValueException * if the initialization failed. * * @throws InitializationException * if the initialization failed. */ protected final void initImpl(PropertyReader runtimeSettings) throws InvalidPropertyValueException, InitializationException { // TODO: Check state Logger log = Library.INIT_LOG; log.debug("Initializing " + _name + " API."); // Store runtime settings _runtimeSettings = runtimeSettings; // Check if response validation is enabled _responseValidationEnabled = getBooleanProperty(runtimeSettings, "org.xins.api.responseValidation", false); log.info("Response validation is " + (_responseValidationEnabled ? "enabled." 
: "disabled.")); // Initialize all instances int count = _manageableObjects.size(); for (int i = 0; i < count; i++) { Manageable m = (Manageable) _manageableObjects.get(i); String className = m.getClass().getName(); log.debug("Initializing manageable object of class " + className + " for " + _name + " API."); try { m.init(runtimeSettings); log.info("Initialized manageable object of class " + className + " for " + _name + " API."); } catch (Throwable exception) { String exMessage = exception.getMessage(); FastStringBuffer buffer = new FastStringBuffer(100, "Failed to initialize manageable object of class "); buffer.append(className); buffer.append(" for "); buffer.append(_name); buffer.append(" API due to "); buffer.append(exception.getClass().getName()); if (exMessage == null || exMessage.length() < 1) { buffer.append('.'); } else { buffer.append(" with message \""); buffer.append(exMessage); buffer.append("\"."); } String message = buffer.toString(); log.error(message, exception); throw new InitializationException(message); } } // TODO: Call initImpl2(PropertyReader) ? log.debug("Initialized " + _name + " API."); } protected final void add(Manageable m) throws IllegalStateException, IllegalArgumentException { // Check state Manageable.State state = getState(); if (getState() != BOOTSTRAPPING) { // TODO: Log throw new IllegalStateException("State is " + state + " instead of " + BOOTSTRAPPING + '.'); } // Check preconditions MandatoryArgumentChecker.check("m", m); // Store the manageable object in the list _manageableObjects.add(m); Library.BOOTSTRAP_LOG.debug("Added manageable object " + m.getClass().getName() + " for " + _name + " API."); } /** * Performs shutdown of this XINS API. This method will never throw any * exception. 
*/ protected final void deinitImpl() { _shutDown = true; // Stop expiry strategy _sessionExpiryStrategy.stop(); // Destroy all sessions int openSessionCount = _sessionsByID.size(); if (openSessionCount == 1) { Library.SHUTDOWN_LOG.info("Closing 1 open session."); } else { Library.SHUTDOWN_LOG.info("Closing " + openSessionCount + " open sessions."); } _sessionsByID = null; // Deinitialize instances int count = _manageableObjects.size(); for (int i = 0; i < count; i++) { Manageable m = (Manageable) _manageableObjects.get(i); String className = m.getClass().getName(); try { m.deinit(); Library.SHUTDOWN_LOG.info("Deinitialized manageable object of class " + className + " for " + _name + " API."); } catch (DeinitializationException exception) { Library.SHUTDOWN_LOG.error("Failed to deinitialize manageable object of class " + className + " for " + _name + " API.", exception); } } } /** * Returns the name of the default function, if any. * * @return * the name of the default function, or <code>null</code> if there is * none. */ public String getDefaultFunctionName() { // TODO: Check state return _defaultFunction; } public boolean isSessionBased() throws IllegalStateException { assertUsable(); return _sessionBased; } public final SessionIDType getSessionIDType() throws IllegalStateException { // Check preconditions assertUsable(); if (! _sessionBased) { throw new IllegalStateException("This API is not session-based."); } return _sessionIDType; } final Session createSession() throws IllegalStateException { // Check preconditions assertUsable(); if (! _sessionBased) { throw new IllegalStateException("This API is not session-based."); } // Generate a session ID that does not yet exist Object sessionID; do { sessionID = _sessionIDGenerator.generateSessionID(); } while (_sessionsByID.get(sessionID) != null); // Construct a Session object... Session session = new Session(this, sessionID); // ...store it... 
_sessionsByID.put(sessionID, session); // ...and then return it return session; } final Session getSession(Object id) throws IllegalStateException { // Check preconditions assertUsable(); if (! _sessionBased) { throw new IllegalStateException("This API is not session-based."); } return (Session) _sessionsByID.get(id); } final Session getSessionByString(String idString) throws IllegalStateException, TypeValueException { // Check preconditions assertUsable(); if (! _sessionBased) { throw new IllegalStateException("This API is not session-based."); } return getSession(_sessionIDType.fromString(idString)); } /** * Callback method invoked when a function is constructed. * * @param function * the function that is added, not <code>null</code>. * * @throws NullPointerException * if <code>function == null</code>. */ final void functionAdded(Function function) throws NullPointerException { // TODO: Check the state here? _functionsByName.put(function.getName(), function); _functionList.add(function); // TODO: After all functions are added, check that the default function // is set. } /** * Callback method invoked when a result code is constructed. * * @param resultCode * the result code that is added, not <code>null</code>. * * @throws NullPointerException * if <code>resultCode == null</code>. */ final void resultCodeAdded(ResultCode resultCode) throws NullPointerException { _resultCodesByName.put(resultCode.getName(), resultCode); _resultCodeList.add(resultCode); } /** * Returns the function with the specified name. * * @param name * the name of the function, will not be checked if it is * <code>null</code>. * * @return * the function with the specified name, or <code>null</code> if there * is no match. */ final Function getFunction(String name) { return (Function) _functionsByName.get(name); } /** * Forwards a call to a function. The call will actually be handled by * {@link Function#handleCall(long,ServletRequest)}. 
* * @param start * the start time of the request, in milliseconds since midnight January * 1, 1970. * * @param request * the original servlet request, not <code>null</code>. * * @return * the result of the call, never <code>null</code>. * * @throws NullPointerException * if <code>request == null</code>. */ final CallResult handleCall(long start, ServletRequest request) throws NullPointerException { // Determine the function name String functionName = request.getParameter("_function"); if (functionName == null || functionName.length() == 0) { functionName = request.getParameter("function"); } if (functionName == null || functionName.length() == 0) { functionName = getDefaultFunctionName(); } // The function name is required if (functionName == null || functionName.length() == 0) { return MISSING_FUNCTION_NAME_RESULT; } // Detect special functions if (functionName.charAt(0) == '_') { if ("_NoOp".equals(functionName)) { return SUCCESSFUL_RESULT; } else if ("_PerformGC".equals(functionName)) { return doPerformGC(); } else if ("_GetFunctionList".equals(functionName)) { return doGetFunctionList(); } else if ("_GetStatistics".equals(functionName)) { return doGetStatistics(); } else if ("_GetVersion".equals(functionName)) { return doGetVersion(); } else if ("_GetSettings".equals(functionName)) { return doGetSettings(); } else if ("_DisableFunction".equals(functionName)) { return doDisableFunction(request); } else if ("_EnableFunction".equals(functionName)) { return doEnableFunction(request); } else { return NO_SUCH_FUNCTION_RESULT; } } // Short-circuit if we are shutting down if (_shutDown) { // TODO: Add message return new BasicCallResult(false, "InternalError", null, null); } // Get the function object Function function = getFunction(functionName); if (function == null) { return NO_SUCH_FUNCTION_RESULT; } // Forward the call to the function return function.handleCall(start, request); } /** * Performs garbage collection. 
* * @return * the call result, never <code>null</code>. */ private final CallResult doPerformGC() { System.gc(); return SUCCESSFUL_RESULT; } /** * Returns a list of all functions in this API. Per function the name and * the version are returned. * * @return * the call result, never <code>null</code>. */ private final CallResult doGetFunctionList() { // Initialize a builder CallResultBuilder builder = new CallResultBuilder(); int count = _functionList.size(); for (int i = 0; i < count; i++) { Function function = (Function) _functionList.get(i); builder.startTag("function"); builder.attribute("name", function.getName()); builder.attribute("version", function.getVersion()); builder.attribute("enabled", function.isEnabled() ? "true" : "false"); builder.endTag(); } return builder; } /** * Returns the call statistics for all functions in this API. * * @return * the call result, never <code>null</code>. */ private final CallResult doGetStatistics() { // Initialize a builder CallResultBuilder builder = new CallResultBuilder(); builder.param("startup", DateConverter.toDateString(_timeZone, _startupTimestamp)); builder.param("now", DateConverter.toDateString(_timeZone, System.currentTimeMillis())); // Currently available processors Runtime rt = Runtime.getRuntime(); try { builder.param("availableProcessors", String.valueOf(rt.availableProcessors())); } catch (NoSuchMethodError error) { // ignore: Runtime.availableProcessors() is not available in Java 1.3 } // Heap memory statistics builder.startTag("heap"); long free = rt.freeMemory(); long total = rt.totalMemory(); builder.attribute("used", String.valueOf(total - free)); builder.attribute("free", String.valueOf(free)); builder.attribute("total", String.valueOf(total)); try { builder.attribute("max", String.valueOf(rt.maxMemory())); } catch (NoSuchMethodError error) { // ignore: Runtime.maxMemory() is not available in Java 1.3 } builder.endTag(); // heap // Function-specific statistics int count = _functionList.size(); for 
(int i = 0; i < count; i++) { Function function = (Function) _functionList.get(i); Function.Statistics stats = function.getStatistics(); long successfulCalls = stats.getSuccessfulCalls(); long unsuccessfulCalls = stats.getUnsuccessfulCalls(); long successfulDuration = stats.getSuccessfulDuration(); long unsuccessfulDuration = stats.getUnsuccessfulDuration(); String successfulAverage; String successfulMin; String successfulMinStart; String successfulMax; String successfulMaxStart; String lastSuccessfulStart; String lastSuccessfulDuration; if (successfulCalls == 0) { successfulAverage = NOT_AVAILABLE; successfulMin = NOT_AVAILABLE; successfulMinStart = NOT_AVAILABLE; successfulMax = NOT_AVAILABLE; successfulMaxStart = NOT_AVAILABLE; lastSuccessfulStart = NOT_AVAILABLE; lastSuccessfulDuration = NOT_AVAILABLE; } else if (successfulDuration == 0) { successfulAverage = "0"; successfulMin = String.valueOf(stats.getSuccessfulMin()); successfulMinStart = DateConverter.toDateString(_timeZone, stats.getSuccessfulMinStart()); successfulMax = String.valueOf(stats.getSuccessfulMax()); successfulMaxStart = DateConverter.toDateString(_timeZone, stats.getSuccessfulMaxStart()); lastSuccessfulStart = DateConverter.toDateString(_timeZone, stats.getLastSuccessfulStart()); lastSuccessfulDuration = String.valueOf(stats.getLastSuccessfulDuration()); } else { successfulAverage = String.valueOf(successfulDuration / successfulCalls); successfulMin = String.valueOf(stats.getSuccessfulMin()); successfulMinStart = DateConverter.toDateString(_timeZone, stats.getSuccessfulMinStart()); successfulMax = String.valueOf(stats.getSuccessfulMax()); successfulMaxStart = DateConverter.toDateString(_timeZone, stats.getSuccessfulMaxStart()); lastSuccessfulStart = DateConverter.toDateString(_timeZone, stats.getLastSuccessfulStart()); lastSuccessfulDuration = String.valueOf(stats.getLastSuccessfulDuration()); } String unsuccessfulAverage; String unsuccessfulMin; String unsuccessfulMinStart; String 
unsuccessfulMax; String unsuccessfulMaxStart; String lastUnsuccessfulStart; String lastUnsuccessfulDuration; if (unsuccessfulCalls == 0) { unsuccessfulAverage = NOT_AVAILABLE; unsuccessfulMin = NOT_AVAILABLE; unsuccessfulMinStart = NOT_AVAILABLE; unsuccessfulMax = NOT_AVAILABLE; unsuccessfulMaxStart = NOT_AVAILABLE; lastUnsuccessfulStart = NOT_AVAILABLE; lastUnsuccessfulDuration = NOT_AVAILABLE; } else if (unsuccessfulDuration == 0) { unsuccessfulAverage = "0"; unsuccessfulMin = String.valueOf(stats.getUnsuccessfulMin()); unsuccessfulMinStart = DateConverter.toDateString(_timeZone, stats.getUnsuccessfulMinStart()); unsuccessfulMax = String.valueOf(stats.getUnsuccessfulMax()); unsuccessfulMaxStart = DateConverter.toDateString(_timeZone, stats.getUnsuccessfulMaxStart()); lastUnsuccessfulStart = DateConverter.toDateString(_timeZone, stats.getLastUnsuccessfulStart()); lastUnsuccessfulDuration = String.valueOf(stats.getLastUnsuccessfulDuration()); } else { unsuccessfulAverage = String.valueOf(unsuccessfulDuration / unsuccessfulCalls); unsuccessfulMin = String.valueOf(stats.getUnsuccessfulMin()); unsuccessfulMinStart = DateConverter.toDateString(_timeZone, stats.getUnsuccessfulMinStart()); unsuccessfulMax = String.valueOf(stats.getUnsuccessfulMax()); unsuccessfulMaxStart = DateConverter.toDateString(_timeZone, stats.getUnsuccessfulMaxStart()); lastUnsuccessfulStart = DateConverter.toDateString(_timeZone, stats.getLastUnsuccessfulStart()); lastUnsuccessfulDuration = String.valueOf(stats.getLastUnsuccessfulDuration()); } builder.startTag("function"); builder.attribute("name", function.getName()); // Successful builder.startTag("successful"); builder.attribute("count", String.valueOf(successfulCalls)); builder.attribute("average", successfulAverage); builder.startTag("min"); builder.attribute("start", successfulMinStart); builder.attribute("duration", successfulMin); builder.endTag(); // min builder.startTag("max"); builder.attribute("start", successfulMaxStart); 
builder.attribute("duration", successfulMax); builder.endTag(); // max builder.startTag("last"); builder.attribute("start", lastSuccessfulStart); builder.attribute("duration", lastSuccessfulDuration); builder.endTag(); // last builder.endTag(); // successful // Unsuccessful builder.startTag("unsuccessful"); builder.attribute("count", String.valueOf(unsuccessfulCalls)); builder.attribute("average", unsuccessfulAverage); builder.startTag("min"); builder.attribute("start", unsuccessfulMinStart); builder.attribute("duration", unsuccessfulMin); builder.endTag(); // min builder.startTag("max"); builder.attribute("start", unsuccessfulMaxStart); builder.attribute("duration", unsuccessfulMax); builder.endTag(); // max builder.startTag("last"); builder.attribute("start", lastUnsuccessfulStart); builder.attribute("duration", lastUnsuccessfulDuration); builder.endTag(); // last builder.endTag(); // unsuccessful builder.endTag(); // function } return builder; } /** * Returns the XINS version. * * @return * the call result, never <code>null</code>. */ private final CallResult doGetVersion() { CallResultBuilder builder = new CallResultBuilder(); builder.param("java.version", System.getProperty("java.version")); builder.param("xmlenc.version", org.znerd.xmlenc.Library.getVersion()); builder.param("xins.version", Library.getVersion()); return builder; } /** * Returns the settings. * * @return * the call result, never <code>null</code>. 
*/ private final CallResult doGetSettings() { CallResultBuilder builder = new CallResultBuilder(); // Build settings Iterator names = _buildSettings.getNames(); builder.startTag("build"); while (names.hasNext()) { String key = (String) names.next(); String value = _buildSettings.get(key); builder.startTag("property"); builder.attribute("name", key); builder.pcdata(value); builder.endTag(); } builder.endTag(); // Runtime settings names = _runtimeSettings.getNames(); builder.startTag("runtime"); while (names.hasNext()) { String key = (String) names.next(); String value = _runtimeSettings.get(key); builder.startTag("property"); builder.attribute("name", key); builder.pcdata(value); builder.endTag(); } builder.endTag(); // System properties Enumeration e = System.getProperties().propertyNames(); builder.startTag("system"); while (e.hasMoreElements()) { String key = (String) e.nextElement(); String value = System.getProperty(key); if (key != null && value != null && key.length() > 0 && value.length() > 0) { builder.startTag("property"); builder.attribute("name", key); builder.pcdata(value); builder.endTag(); } } builder.endTag(); return builder; } /** * Enables a function. * * @param request * the servlet request, cannot be <code>null</code>. * * @return * the call result, never <code>null</code>. * * @throws NullPointerException * if <code>request == null</code>. */ private final CallResult doEnableFunction(ServletRequest request) { // Get the name of the function to enable String functionName = request.getParameter("functionName"); if (functionName == null || functionName.length() < 1) { return new BasicCallResult(false, "MissingParameters", null, null); } // Get the Function object Function function = getFunction(functionName); if (function == null) { return new BasicCallResult(false, "InvalidParameters", null, null); } // Enable or disable the function function.setEnabled(true); return SUCCESSFUL_RESULT; } /** * Disables a function. 
* * @param request * the servlet request, cannot be <code>null</code>. * * @return * the call result, never <code>null</code>. * * @throws NullPointerException * if <code>request == null</code>. */ private final CallResult doDisableFunction(ServletRequest request) { // Get the name of the function to disable String functionName = request.getParameter("functionName"); if (functionName == null || functionName.length() < 1) { return new BasicCallResult(false, "MissingParameters", null, null); } // Get the Function object Function function = getFunction(functionName); if (function == null) { return new BasicCallResult(false, "InvalidParameters", null, null); } // Enable or disable the function function.setEnabled(false); return SUCCESSFUL_RESULT; } }
package axiom.scripting.rhino;

import java.io.*;
import java.lang.reflect.Method;
import java.util.HashSet;

import javax.activation.MimetypesFileTypeMap;

import org.mozilla.javascript.*;

import eu.medsea.util.MimeUtil;

import axiom.framework.ErrorReporter;
import axiom.framework.core.Application;
import axiom.objectmodel.INode;
import axiom.util.MimePart;
import axiom.util.TextExtractor;

/**
 * Rhino constructor function for the JavaScript <code>File</code> prototype.
 * Wraps either an uploaded {@link MimePart} or the path of an existing file in
 * a {@link FileObject}, copying metadata (name, size, content type) onto the
 * backing {@link INode} and optionally extracting searchable text content.
 */
public class FileObjectCtor extends FunctionObject {

    RhinoCore core;

    /** Reflective handle to {@link #jsConstructor}, resolved once at class load. */
    static Method fileObjCtor;

    static {
        try {
            fileObjCtor = FileObjectCtor.class.getMethod("jsConstructor", new Class[] {
                Context.class, Object[].class, Function.class, Boolean.TYPE });
        } catch (NoSuchMethodException e) {
            throw new RuntimeException("Error getting FileObjectCtor.jsConstructor()");
        }
    }

    /** Temp file names handed out so far; always synchronize on the set itself. */
    static HashSet tmpfiles = new HashSet();

    static final int attr =
        ScriptableObject.DONTENUM | ScriptableObject.PERMANENT | ScriptableObject.READONLY;

    /**
     * Create and install a AxiomObject constructor.
     * Part of this is copied from o.m.j.FunctionObject.addAsConstructor().
     *
     * @param core      the Rhino core owning the global scope
     * @param prototype the prototype object for File instances
     */
    public FileObjectCtor(RhinoCore core, Scriptable prototype) {
        super("File", fileObjCtor, core.global);
        this.core = core;
        addAsConstructor(core.global, prototype);
    }

    /**
     * This method is used as AxiomObject constructor from JavaScript:
     * <code>new File(String|MimePart [, extractText])</code>.
     *
     * @param cx        the current Rhino context
     * @param args      constructor arguments; args[0] is the MimePart/path/data,
     *                  optional args[1] toggles text extraction
     * @param ctorObj   the constructor function object (this class)
     * @param inNewExpr whether invoked via <code>new</code> (unused)
     * @return the new FileObject, or <code>null</code> when args[0] has an
     *         unsupported type (preserved historical behavior)
     * @throws Exception if no arguments are supplied
     */
    public static Object jsConstructor(Context cx, Object[] args, Function ctorObj,
                                       boolean inNewExpr) throws Exception {
        if (args == null || args.length == 0) {
            throw new Exception("Invalid constructor parameters, new File(String|Mimepart, [optional] Boolean)");
        }

        FileObjectCtor ctor = (FileObjectCtor) ctorObj;
        RhinoCore core = ctor.core;
        String protoname = ctor.getFunctionName();

        INode node = new axiom.objectmodel.db.Node("_" + protoname, protoname,
                core.app.getWrappedNodeManager());
        Scriptable proto = core.getPrototype(protoname);
        FileObject fobj = null;

        if (args[0] instanceof NativeJavaObject || args[0] instanceof MimePart
                || args[0] instanceof String) {
            Object obj = args[0];
            if (args[0] instanceof NativeJavaObject) {
                obj = ((NativeJavaObject) args[0]).unwrap();
            }
            if (obj instanceof MimePart || obj instanceof String) {
                fobj = new FileObject("File", core, node, proto, true);
                // Optional second argument: should text content be extracted?
                boolean extractText = false;
                if (args.length > 1) {
                    if (args[1] instanceof Scriptable) {
                        Scriptable s = (Scriptable) args[1];
                        if ("Boolean".equals(s.getClassName())) {
                            extractText = ScriptRuntime.toBoolean(s);
                        }
                    } else if (args[1] instanceof Boolean) {
                        extractText = ((Boolean) args[1]).booleanValue();
                    }
                }
                // NOTE(review): the flag is negated here, so setup() receives
                // extractContent == !extractText. Preserved as-is — confirm the
                // intended meaning of the JS-level boolean before changing.
                FileObjectCtor.setup(fobj, node, args, core.app, !extractText);
            } else if (args[0] instanceof Scriptable) {
                // A wrapped Java object that is neither MimePart nor String:
                // fall back to the generic Scriptable-data constructor.
                Scriptable data = (Scriptable) args[0];
                fobj = new FileObject("File", core, node, proto, data);
            }
        } else if (args[0] instanceof Scriptable) {
            Scriptable data = (Scriptable) args[0];
            fobj = new FileObject("File", core, node, proto, data);
        }

        return fobj;
    }

    /**
     * Populate <code>node</code> with file metadata from the constructor
     * arguments and stage the file contents on disk.
     *
     * @param fobj           the FileObject being constructed
     * @param node           backing node receiving the metadata properties
     * @param args           the JS constructor arguments (only args[0] is used)
     * @param app            the owning application (for temp dirs and logging)
     * @param extractContent whether to extract text content into the node
     * @return the staged file path, or <code>null</code>
     * @throws RuntimeException if args is empty, args[0] is null or has the
     *                          wrong type, or a String path is unreadable
     */
    protected static String setup(FileObject fobj, INode node, Object[] args,
                                  Application app, boolean extractContent) {
        if (args == null || args.length == 0) {
            throw new RuntimeException("FileObjectCtor: must specify arguments to the File constructor!");
        }
        if (args[0] == null) {
            throw new RuntimeException("FileObjectCtor: first argument cannot be null!");
        }

        String ret = null;
        if (args[0] instanceof NativeJavaObject || args[0] instanceof MimePart) {
            // Uploaded file: copy it into the application's temp directory.
            MimePart mp;
            if (args[0] instanceof NativeJavaObject) {
                mp = (MimePart) ((NativeJavaObject) args[0]).unwrap();
            } else {
                mp = (MimePart) args[0];
            }

            String filename = mp.getName();
            node.setString(FileObject.FILE_NAME, filename);
            node.setString(FileObject.ACCESSNAME, generateAccessName(filename));

            final String tmpdir = (String) FileObject.tmpDirs.get(app.getName());
            filename = generateTmpFileName(filename, tmpdir);
            mp.writeToFile(tmpdir, filename);
            fobj.tmpPath = tmpdir + filename;

            node.setInteger(FileObject.FILE_SIZE, mp.contentLength);

            String mimetype = mp.contentType;
            // Browsers often send application/octet-stream; re-sniff in that case.
            if (mimetype == null || mimetype.equals("application/octet-stream")) {
                mimetype = MimeUtil.getMimeType(new File(fobj.tmpPath));
            }
            node.setString(FileObject.CONTENT_TYPE, mimetype);
            node.setString(FileObject.RENDERED_CONTENT, "false");
            node.setJavaObject(FileObject.SELF, fobj);
            node.setString(FileObject.FILE_UPLOAD, "true");

            String text = extractContent ? extractText(fobj.tmpPath, app) : null;
            if (text != null) {
                node.setString(FileObject.CONTENT, text);
            }
            ret = fobj.tmpPath;
        } else if (args[0] instanceof String) {
            // Existing file on disk: reference it in place.
            final String path = (String) args[0];
            final File file = new File(path);
            if (!file.exists() || !file.isFile()) {
                throw new RuntimeException("FileObjectCtor: " + path + " does not point to a readable file");
            }

            String filename = file.getName();
            node.setString(FileObject.FILE_NAME, filename);
            node.setString(FileObject.ACCESSNAME, FileObjectCtor.generateAccessName(filename));
            fobj.tmpPath = file.getAbsolutePath();
            node.setInteger(FileObject.FILE_SIZE, file.length());

            String mimetype = guessContentType(file);
            if (mimetype == null || mimetype.equals("application/octet-stream")) {
                mimetype = MimeUtil.getMimeType(new File(fobj.tmpPath));
            }
            node.setString(FileObject.CONTENT_TYPE, mimetype);
            node.setString(FileObject.RENDERED_CONTENT, "false");
            node.setJavaObject(FileObject.SELF, fobj);
            node.setString(FileObject.FILE_UPLOAD, "false");

            String text = extractContent ? extractText(fobj.tmpPath, app) : null;
            if (text != null) {
                node.setString(FileObject.CONTENT, text);
            }
            ret = fobj.tmpPath;
        } else {
            throw new RuntimeException("FileObjectCtor: first argument to constructor must be the MimePart object or the location of a file");
        }
        return ret;
    }

    /** Forget a previously issued temp file name. */
    public static void removeTmp(String tmpFile) {
        synchronized (tmpfiles) {
            tmpfiles.remove(tmpFile);
        }
    }

    /**
     * Build a unique temp file name of the form
     * <code>&lt;millis&gt;.&lt;filename&gt;[&lt;counter&gt;]</code> and register it
     * in {@link #tmpfiles}. The returned name has the temp-dir prefix stripped
     * and spaces replaced by underscores.
     */
    public static String generateTmpFileName(final String filename, final String tmpdir) {
        StringBuffer tmpBuffer = new StringBuffer(tmpdir);
        tmpBuffer.append(Long.toString(System.currentTimeMillis()))
                 .append(".")
                 .append(filename);
        int tmpcount = 0;
        String appender = "";
        String tmpFile = tmpBuffer.toString();
        synchronized (tmpfiles) {
            // Bump a numeric suffix until the candidate name is unused.
            while (tmpfiles.contains(tmpFile + appender)) {
                appender = Integer.toString(tmpcount++);
            }
            // NOTE(review): the registered entry keeps the tmpdir prefix and any
            // spaces, while the returned name has neither — verify that
            // removeTmp() callers pass the matching form before relying on it.
            tmpfiles.add(tmpBuffer.append(appender).toString());
        }
        return tmpBuffer.delete(0, tmpdir.length()).toString().replaceAll(" ", "_");
    }

    /**
     * Convert all path separators in <code>path</code> to the platform
     * separator ('/' vs. '\').
     */
    public static String normalizePath(String path) {
        String separator = File.separator;
        String wrong = separator.equals("\\") ? "/" : "\\";
        StringBuffer pathBuffer = new StringBuffer(path);
        int index = -1;
        while ((index = pathBuffer.indexOf(wrong, index)) > -1) {
            pathBuffer.replace(index, index + 1, separator);
        }
        return pathBuffer.toString();
    }

    /**
     * Extract plain-text content from the file at <code>file</code>, chosen by
     * its extension (.pdf, .doc, .xls, .ppt, or a known text extension).
     *
     * @return the extracted text, or <code>null</code> on failure or for
     *         unsupported types
     */
    protected static String extractText(String file, Application app) {
        String text = null;
        int index = file.lastIndexOf(File.separator);
        // Extraction is only attempted for paths containing a separator
        // (always the case for files staged by setup()).
        if (index > -1) {
            String fileExt = file.substring(index).toLowerCase();
            FileInputStream fis = null;
            try {
                fis = new FileInputStream(file);
                if (fileExt.indexOf(".pdf") > -1) {
                    text = TextExtractor.adobePDFExtractor(fis);
                } else if (fileExt.indexOf(".doc") > -1) {
                    text = TextExtractor.msWordExtractor(fis);
                } else if (fileExt.indexOf(".xls") > -1) {
                    text = TextExtractor.msExcelExtractor(fis);
                } else if (fileExt.indexOf(".ppt") > -1) {
                    text = TextExtractor.msPowerPointExtractor(fis);
                } else if (isTextFile(fileExt)) {
                    text = extractText(fis);
                }
            } catch (Exception ex) {
                app.logError(ErrorReporter.errorMsg(FileObjectCtor.class, "extractText")
                        + "Failed for the file " + file, ex);
                text = null;
            } finally {
                if (fis != null) {
                    try {
                        fis.close();
                    } catch (Exception ignore) {
                        app.logError(ErrorReporter.errorMsg(FileObjectCtor.class, "extractText"), ignore);
                    }
                    fis = null;
                }
            }
        }
        return text;
    }

    /**
     * Whether <code>fileExt</code> (a filename tail such as "/foo.txt") names a
     * plain-text file type.
     */
    private static boolean isTextFile(String fileExt) {
        int idx = fileExt.lastIndexOf(".");
        // FIX: the previous version dereferenced a null extension and threw a
        // NullPointerException for names without a usable extension.
        if (idx <= 0) {
            return false;
        }
        String ext = fileExt.substring(idx + 1).trim().toLowerCase();
        return ext.equals("txt") || ext.equals("properties") || ext.equals("java")
                || ext.equals("html") || ext.equals("xml") || ext.equals("css");
    }

    /**
     * Read a text file into a String. Line terminators are normalized to
     * '\n'. (FIX: the previous version dropped terminators entirely, fusing
     * words across line breaks in the extracted content.)
     *
     * @return the file contents, or <code>null</code> on I/O error
     */
    private static String extractText(FileInputStream fis) {
        String str = null;
        BufferedReader br = null;
        try {
            br = new BufferedReader(new FileReader(fis.getFD()));
            StringBuffer sb = new StringBuffer();
            String line;
            while ((line = br.readLine()) != null) {
                sb.append(line).append('\n');
            }
            str = sb.toString();
        } catch (IOException ioex) {
            str = null;
        } finally {
            if (br != null) {
                try {
                    br.close();
                } catch (IOException ignore) {
                }
                br = null;
            }
        }
        return str;
    }

    /**
     * Derive the access name for a file: the filename with its last extension
     * stripped (names starting with '.' are kept whole).
     */
    protected static String generateAccessName(String filename) {
        String accessName = filename;
        int indexOfExt = accessName.lastIndexOf(".");
        if (indexOfExt > 0) {
            accessName = accessName.substring(0, indexOfExt);
        }
        return accessName;
    }

    /**
     * Guess a MIME content type from the file's extension, falling back to
     * {@link MimetypesFileTypeMap} for unknown extensions.
     */
    protected static String guessContentType(File file) {
        String name = file.getName();
        int idx = name.lastIndexOf(".");
        String ext = null;
        if (idx > 0) {
            ext = name.substring(idx + 1).trim().toLowerCase();
        }
        if (ext != null) {
            if ("doc".equals(ext) || "dot".equals(ext)) {
                return "application/msword";
            } else if ("pdf".equals(ext)) {
                return "application/pdf";
            } else if ("xls".equals(ext)) {
                return "application/excel";
            } else if ("ppt".equals(ext)) {
                return "application/ppt";
            } else if ("rtf".equals(ext)) {
                return "application/rtf";
            } else if ("ps".equals(ext) || "ai".equals(ext) || "eps".equals(ext)) {
                return "application/postscript";
            }
        }
        return new MimetypesFileTypeMap().getContentType(file);
    }
}
package net.sf.picard.sam;

import net.sf.picard.PicardException;
import net.sf.picard.cmdline.Usage;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import net.sf.picard.util.PeekableIterator;
import net.sf.samtools.*;
import net.sf.samtools.SAMFileHeader.SortOrder;
import net.sf.picard.util.Log;
import net.sf.samtools.util.RuntimeIOException;
import net.sf.picard.cmdline.CommandLineProgram;
import net.sf.picard.cmdline.Option;
import net.sf.picard.cmdline.StandardOptionDefinitions;
import net.sf.picard.io.IoUtil;
import net.sf.samtools.util.SortingCollection;

/**
 * Class to fix mate pair information for all reads in a SAM file. Will run in fairly limited
 * memory unless there are lots of mate pairs that are far apart from each other in the file.
 *
 * @author Tim Fennell
 */
public class FixMateInformation extends CommandLineProgram {
    @Usage public final String USAGE = "Ensure that all mate-pair information is in sync between each read " +
            " and it's mate pair. If no OUTPUT file is supplied then the output is written to a temporary file " +
            " and then copied over the INPUT file.";

    @Option(shortName=StandardOptionDefinitions.INPUT_SHORT_NAME, doc="The input file to fix.")
    public List<File> INPUT;

    @Option(shortName=StandardOptionDefinitions.OUTPUT_SHORT_NAME, optional=true,
            doc="The output file to write to. If no output file is supplied, the input file is overwritten.")
    public File OUTPUT;

    @Option(shortName=StandardOptionDefinitions.SORT_ORDER_SHORT_NAME, optional=true,
            doc="Optional sort order if the OUTPUT file should be sorted differently than the INPUT file.")
    public SortOrder SORT_ORDER;

    private static final Log log = Log.getInstance(FixMateInformation.class);

    /** Writer for the fixed records; created by {@link #createSamFileWriter}. */
    protected SAMFileWriter out;

    /** Stock main method. */
    public static void main(final String[] args) {
        new FixMateInformation().instanceMainWithExit(args);
    }

    /**
     * Does the work: merges and queryname-sorts the input(s) if necessary,
     * pairs up consecutive records with the same read name, fixes their mate
     * information, and writes the result — either to OUTPUT or back over the
     * sole INPUT file via a temp file.
     *
     * @return 0 on success, 1 when the in-place file swap fails
     */
    protected int doWork() {
        // Open up the inputs.
        boolean allQueryNameSorted = true;
        final List<SAMFileReader> readers = new ArrayList<SAMFileReader>();
        for (final File f : INPUT) {
            IoUtil.assertFileIsReadable(f);
            final SAMFileReader reader = new SAMFileReader(f);
            // FIX: previously each input file was opened TWICE (one reader for
            // the sort-order check, a second "new SAMFileReader(f)" added to
            // the list), leaking one open file handle per input.
            readers.add(reader);
            if (reader.getFileHeader().getSortOrder() != SortOrder.queryname) allQueryNameSorted = false;
        }

        // Decide where to write the fixed file - into the specified output file
        // or into a temporary file that will overwrite the INPUT file eventually
        if (OUTPUT != null) OUTPUT = OUTPUT.getAbsoluteFile();
        final boolean differentOutputSpecified = OUTPUT != null;

        if (differentOutputSpecified) {
            IoUtil.assertFileIsWritable(OUTPUT);
        }
        else if (INPUT.size() != 1) {
            throw new PicardException("Must specify either an explicit OUTPUT file or a single INPUT file to be overridden.");
        }
        else {
            // Overwrite mode: stage the output as a temp file in the same
            // directory so the final rename stays on one filesystem.
            final File soleInput = INPUT.get(0).getAbsoluteFile();
            final File dir       = soleInput.getParentFile().getAbsoluteFile();

            try {
                IoUtil.assertFileIsWritable(soleInput);
                IoUtil.assertDirectoryIsWritable(dir);
                OUTPUT = File.createTempFile(soleInput.getName() + ".being_fixed.", ".bam", dir);
            }
            catch (IOException ioe) {
                throw new RuntimeIOException("Could not create tmp file in " + dir.getAbsolutePath());
            }
        }

        // Get the input records merged and sorted by query name as needed
        final PeekableIterator<SAMRecord> iterator;
        final SAMFileHeader header;

        {
            // Deal with merging if necessary
            Iterator<SAMRecord> tmp;
            if (INPUT.size() > 1) {
                List<SAMFileHeader> headers = new ArrayList<SAMFileHeader>(readers.size());
                for (SAMFileReader reader : readers) {
                    headers.add(reader.getFileHeader());
                }
                final SortOrder sortOrder = (allQueryNameSorted? SortOrder.queryname: SortOrder.unsorted);
                final SamFileHeaderMerger merger = new SamFileHeaderMerger(sortOrder, headers, false);
                tmp = new MergingSamRecordIterator(merger, readers, false);
                header = merger.getMergedHeader();
            }
            else {
                tmp = readers.get(0).iterator();
                header = readers.get(0).getFileHeader();
            }

            // And now deal with re-sorting if necessary
            if (allQueryNameSorted) {
                iterator = new PeekableIterator<SAMRecord>(tmp);
            }
            else {
                log.info("Sorting input into queryname order.");
                final SortingCollection<SAMRecord> sorter = SortingCollection.newInstance(SAMRecord.class,
                                                                                          new BAMRecordCodec(header),
                                                                                          new SAMRecordQueryNameComparator(),
                                                                                          MAX_RECORDS_IN_RAM,
                                                                                          TMP_DIR);
                while (tmp.hasNext()) {
                    sorter.add(tmp.next());
                }
                iterator = new PeekableIterator<SAMRecord>(sorter.iterator());
                log.info("Sorting by queryname complete.");
            }

            // Deal with the various sorting complications
            final SortOrder outputSortOrder = SORT_ORDER == null ? readers.get(0).getFileHeader().getSortOrder() : SORT_ORDER;
            log.info("Output will be sorted by " + outputSortOrder);
            header.setSortOrder(outputSortOrder);
        }

        if (CREATE_INDEX && header.getSortOrder() != SortOrder.coordinate){
            throw new PicardException("Can't CREATE_INDEX unless sort order is coordinate");
        }

        createSamFileWriter(header);

        log.info("Traversing query name sorted records and fixing up mate pair information.");
        long count = 0;
        while (iterator.hasNext()) {
            final SAMRecord rec1 = iterator.next();
            // Peek at the next record: if it shares the read name, it's the mate.
            final SAMRecord rec2 = iterator.hasNext() ? iterator.peek() : null;

            if (rec2 != null && rec1.getReadName().equals(rec2.getReadName())) {
                iterator.next(); // consume the peeked record
                SamPairUtil.setMateInfo(rec1, rec2, header);
                writeAlignment(rec1);
                writeAlignment(rec2);
                count += 2;
            }
            else {
                // Unpaired record (or mate missing): pass through unchanged.
                writeAlignment(rec1);
                ++count;
            }

            if (count % 1000000 == 0) {
                log.info("Processed " + count + " records.");
            }
        }

        if (header.getSortOrder() == SortOrder.queryname) {
            log.info("Closing output file.");
        }
        else {
            log.info("Finished processing reads; re-sorting output file.");
        }
        closeWriter();

        // FIX: release the input file handles (previously never closed). This
        // must happen before the in-place rename below, which can fail on some
        // platforms while the input file is still open.
        for (final SAMFileReader reader : readers) {
            reader.close();
        }

        // Lastly if we're fixing in place, swap the files
        if (!differentOutputSpecified) {
            log.info("Replacing input file with fixed file.");

            final File soleInput = INPUT.get(0).getAbsoluteFile();
            final File old = new File(soleInput.getParentFile(), soleInput.getName() + ".old");
            if (!old.exists() && soleInput.renameTo(old)) {
                if (OUTPUT.renameTo(soleInput)) {
                    if (!old.delete()) {
                        log.warn("Could not delete old file: " + old.getAbsolutePath());
                        return 1;
                    }
                }
                else {
                    log.error("Could not move new file to " + soleInput.getAbsolutePath());
                    log.error("Input file preserved as: " + old.getAbsolutePath());
                    log.error("New file preserved as: " + OUTPUT.getAbsolutePath());
                    return 1;
                }
            }
            else {
                log.error("Could not move input file out of the way: " + soleInput.getAbsolutePath());

                if (!OUTPUT.delete()) {
                    log.error("Could not delete temporary file: " + OUTPUT.getAbsolutePath());
                }

                return 1;
            }
        }

        return 0;
    }

    /** Create the output writer; presorted when the header is queryname-sorted. */
    protected void createSamFileWriter(SAMFileHeader header) {
        out = new SAMFileWriterFactory().makeSAMOrBAMWriter(header,
                  header.getSortOrder() == SortOrder.queryname, OUTPUT);
    }

    /** Write one record to the output. */
    protected void writeAlignment(SAMRecord sam) {
        out.addAlignment(sam);
    }

    /** Close the output writer, flushing any buffered records. */
    protected void closeWriter() {
        out.close();
    }
}
package org.apache.commons.digester; import java.io.File; import java.io.FileInputStream; import java.io.FileReader; import java.io.InputStream; import java.io.IOException; import java.io.PrintWriter; import java.io.Reader; import java.net.URL; import java.net.URLConnection; import java.util.ArrayList; import java.util.EmptyStackException; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import javax.xml.parsers.ParserConfigurationException; import javax.xml.parsers.SAXParser; import javax.xml.parsers.SAXParserFactory; import org.apache.commons.collections.ArrayStack; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.xml.sax.Attributes; import org.xml.sax.ContentHandler; import org.xml.sax.EntityResolver; import org.xml.sax.ErrorHandler; import org.xml.sax.helpers.DefaultHandler; import org.xml.sax.InputSource; import org.xml.sax.Locator; import org.xml.sax.SAXException; import org.xml.sax.SAXNotRecognizedException; import org.xml.sax.SAXNotSupportedException; import org.xml.sax.SAXParseException; import org.xml.sax.XMLReader; public class Digester extends DefaultHandler { /** * Construct a new Digester with default properties. */ public Digester() { super(); } /** * Construct a new Digester, allowing a SAXParser to be passed in. This * allows Digester to be used in environments which are unfriendly to * JAXP1.1 (such as WebLogic 6.0). Thanks for the request to change go to * James House (james@interobjective.com). This may help in places where * you are able to load JAXP 1.1 classes yourself. */ public Digester(SAXParser parser) { super(); this.parser = parser; } /** * Construct a new Digester, allowing an XMLReader to be passed in. This * allows Digester to be used in environments which are unfriendly to * JAXP1.1 (such as WebLogic 6.0). 
Note that if you use this option you * have to configure namespace and validation support yourself, as these * properties only affect the SAXParser and emtpy constructor. */ public Digester(XMLReader reader) { super(); this.reader = reader; } /** * The body text of the current element. */ protected StringBuffer bodyText = new StringBuffer(); /** * The stack of body text string buffers for surrounding elements. */ protected ArrayStack bodyTexts = new ArrayStack(); /** * The class loader to use for instantiating application objects. * If not specified, the context class loader, or the class loader * used to load Digester itself, is used, based on the value of the * <code>useContextClassLoader</code> variable. */ protected ClassLoader classLoader = null; /** * Has this Digester been configured yet. */ protected boolean configured = false; /** * The URLs of entityValidator that have been registered, keyed by the public * identifier that corresponds. */ protected HashMap entityValidator = new HashMap(); /** * The application-supplied error handler that is notified when parsing * warnings, errors, or fatal errors occur. */ protected ErrorHandler errorHandler = null; /** * The SAXParserFactory that is created the first time we need it. */ protected SAXParserFactory factory = null; /** * The JAXP 1.2 property required to set up the schema location. */ private static final String JAXP_SCHEMA_SOURCE = "http://java.sun.com/xml/jaxp/properties/schemaSource"; /** * The JAXP 1.2 property to set up the schemaLanguage used. */ private String JAXP_SCHEMA_LANGUAGE = "http://java.sun.com/xml/jaxp/properties/schemaLanguage"; /** * The Locator associated with our parser. */ protected Locator locator = null; /** * The current match pattern for nested element processing. */ protected String match = ""; /** * Do we want a "namespace aware" parser. */ protected boolean namespaceAware = false; /** * Registered namespaces we are currently processing. 
The key is the * namespace prefix that was declared in the document. The value is an * ArrayStack of the namespace URIs this prefix has been mapped to -- * the top Stack element is the most current one. (This architecture * is required because documents can declare nested uses of the same * prefix for different Namespace URIs). */ protected HashMap namespaces = new HashMap(); /** * The parameters stack being utilized by CallMethodRule and * CallParamRule rules. */ protected ArrayStack params = new ArrayStack(); /** * The SAXParser we will use to parse the input stream. */ protected SAXParser parser = null; /** * The public identifier of the DTD we are currently parsing under * (if any). */ protected String publicId = null; /** * The XMLReader used to parse digester rules. */ protected XMLReader reader = null; /** * The "root" element of the stack (in other words, the last object * that was popped. */ protected Object root = null; /** * The <code>Rules</code> implementation containing our collection of * <code>Rule</code> instances and associated matching policy. If not * established before the first rule is added, a default implementation * will be provided. */ protected Rules rules = null; /** * The XML schema language to use for validating an XML instance. By * default this value is set to <code>W3C_XML_SCHEMA</code> */ private String schemaLanguage = W3C_XML_SCHEMA; /** * The XML schema to use for validating an XML instance. */ private String schemaLocation = null; /** * The object stack being constructed. */ protected ArrayStack stack = new ArrayStack(); /** * Do we want to use the Context ClassLoader when loading classes * for instantiating new objects. Default is <code>false</code>. */ protected boolean useContextClassLoader = false; /** * Do we want to use a validating parser. */ protected boolean validating = false; /** * The Log to which most logging calls will be made. 
*/ protected Log log = LogFactory.getLog("org.apache.commons.digester.Digester"); /** * The Log to which all SAX event related logging calls will be made. */ private Log saxLog = LogFactory.getLog("org.apache.commons.digester.Digester.sax"); /** * The schema language supported. By default, we use this one. */ private static final String W3C_XML_SCHEMA = "http: /** * Return the currently mapped namespace URI for the specified prefix, * if any; otherwise return <code>null</code>. These mappings come and * go dynamically as the document is parsed. * * @param prefix Prefix to look up */ public String findNamespaceURI(String prefix) { ArrayStack stack = (ArrayStack) namespaces.get(prefix); if (stack == null) { return (null); } try { return ((String) stack.peek()); } catch (EmptyStackException e) { return (null); } } /** * Return the class loader to be used for instantiating application objects * when required. This is determined based upon the following rules: * <ul> * <li>The class loader set by <code>setClassLoader()</code>, if any</li> * <li>The thread context class loader, if it exists and the * <code>useContextClassLoader</code> property is set to true</li> * <li>The class loader used to load the Digester class itself. * </ul> */ public ClassLoader getClassLoader() { if (this.classLoader != null) { return (this.classLoader); } if (this.useContextClassLoader) { ClassLoader classLoader = Thread.currentThread().getContextClassLoader(); if (classLoader != null) { return (classLoader); } } return (this.getClass().getClassLoader()); } /** * Set the class loader to be used for instantiating application objects * when required. * * @param classLoader The new class loader to use, or <code>null</code> * to revert to the standard rules */ public void setClassLoader(ClassLoader classLoader) { this.classLoader = classLoader; } /** * Return the current depth of the element stack. 
*/ public int getCount() { return (stack.size()); } /** * Return the name of the XML element that is currently being processed. */ public String getCurrentElementName() { String elementName = match; int lastSlash = elementName.lastIndexOf('/'); if (lastSlash >= 0) { elementName = elementName.substring(lastSlash + 1); } return (elementName); } /** * Return the debugging detail level of our currently enabled logger. * * @deprecated Configure the logger using standard mechanisms * for your implementation */ public int getDebug() { return (0); } /** * Set the debugging detail level of our currently enabled logger. * * @param debug New debugging detail level (0=off, increasing integers * for more detail) * * @deprecated Configure the logger using standard mechanisms * for your implementation */ public void setDebug(int debug) { ; // No action is taken } /** * Return the error handler for this Digester. */ public ErrorHandler getErrorHandler() { return (this.errorHandler); } /** * Set the error handler for this Digester. * * @param errorHandler The new error handler */ public void setErrorHandler(ErrorHandler errorHandler) { this.errorHandler = errorHandler; } /** * Return the SAXParserFactory we will use, creating one if necessary. */ public SAXParserFactory getFactory() { if (factory == null) { factory = SAXParserFactory.newInstance(); factory.setNamespaceAware(namespaceAware); factory.setValidating(validating); } return (factory); } public boolean getFeature(String feature) throws ParserConfigurationException, SAXNotRecognizedException, SAXNotSupportedException { return (getFactory().getFeature(feature)); } public void setFeature(String feature, boolean value) throws ParserConfigurationException, SAXNotRecognizedException, SAXNotSupportedException { getFactory().setFeature(feature, value); } /** * Return the current Logger associated with this instance of the Digester */ public Log getLogger() { return log; } /** * Set the current logger for this Digester. 
*/ public void setLogger(Log log) { this.log = log; } /** * Return the current rule match path */ public String getMatch() { return match; } /** * Return the "namespace aware" flag for parsers we create. */ public boolean getNamespaceAware() { return (this.namespaceAware); } /** * Set the "namespace aware" flag for parsers we create. * * @param namespaceAware The new "namespace aware" flag */ public void setNamespaceAware(boolean namespaceAware) { this.namespaceAware = namespaceAware; } /** * Return the public identifier of the DTD we are currently * parsing under, if any. */ public String getPublicId() { return (this.publicId); } /** * Return the namespace URI that will be applied to all subsequently * added <code>Rule</code> objects. */ public String getRuleNamespaceURI() { return (getRules().getNamespaceURI()); } /** * Set the namespace URI that will be applied to all subsequently * added <code>Rule</code> objects. * * @param ruleNamespaceURI Namespace URI that must match on all * subsequently added rules, or <code>null</code> for matching * regardless of the current namespace URI */ public void setRuleNamespaceURI(String ruleNamespaceURI) { getRules().setNamespaceURI(ruleNamespaceURI); } /** * Return the SAXParser we will use to parse the input stream. If there * is a problem creating the parser, return <code>null</code>. 
*/ public SAXParser getParser() { // Return the parser we already created (if any) if (parser != null) { return (parser); } // Create a new parser try { parser = getFactory().newSAXParser(); } catch (Exception e) { log.error("Digester.getParser: ", e); return (null); } // Configure standard properties and return the new instance try { setProperty(JAXP_SCHEMA_LANGUAGE, schemaLanguage); } catch (Exception e) { log.warn("" + e); } try { if (schemaLocation != null) { setProperty(JAXP_SCHEMA_SOURCE, schemaLocation); } } catch (Exception e) { log.warn("" + e); } return (parser); } public Object getProperty(String property) throws SAXNotRecognizedException, SAXNotSupportedException { return (getParser().getProperty(property)); } public void setProperty(String property, Object value) throws SAXNotRecognizedException, SAXNotSupportedException { getParser().setProperty(property, value); } /** * By setting the reader in the constructor, you can bypass JAXP and * be able to use digester in Weblogic 6.0. * * @deprecated Use getXMLReader() instead, which can throw a * SAXException if the reader cannot be instantiated */ public XMLReader getReader() { try { return (getXMLReader()); } catch (SAXException e) { log.error("Cannot get XMLReader", e); return (null); } } /** * Return the <code>Rules</code> implementation object containing our * rules collection and associated matching policy. If none has been * established, a default implementation will be created and returned. */ public Rules getRules() { if (this.rules == null) { this.rules = new RulesBase(); this.rules.setDigester(this); } return (this.rules); } /** * Set the <code>Rules</code> implementation object containing our * rules collection and associated matching policy. * * @param rules New Rules implementation */ public void setRules(Rules rules) { this.rules = rules; this.rules.setDigester(this); } /** * Return the XML Schema URI used for validating an XML instance. 
*/ public String getSchema() { return (this.schemaLocation); } /** * Set the XML Schema URI used for validating a XML Instance. * * @param schemaLocation a URI to the schema. */ public void setSchema(String schemaLocation){ this.schemaLocation = schemaLocation; } /** * Return the XML Schema language used when parsing. */ public String getSchemaLanguage() { return (this.schemaLanguage); } /** * Set the XML Schema language used when parsing. By default, we use W3C. * * @param schemaLanguage a URI to the schema language. */ public void setSchemaLanguage(String schemaLanguage){ schemaLanguage = schemaLanguage; } /** * Return the boolean as to whether the context classloader should be used. */ public boolean getUseContextClassLoader() { return useContextClassLoader; } /** * Determine whether to use the Context ClassLoader (the one found by * calling <code>Thread.currentThread().getContextClassLoader()</code>) * to resolve/load classes that are defined in various rules. If not * using Context ClassLoader, then the class-loading defaults to * using the calling-class' ClassLoader. * * @param boolean determines whether to use Context ClassLoader. */ public void setUseContextClassLoader(boolean use) { useContextClassLoader = use; } /** * Return the validating parser flag. */ public boolean getValidating() { return (this.validating); } /** * Set the validating parser flag. This must be called before * <code>parse()</code> is called the first time. * * @param validating The new validating parser flag. */ public void setValidating(boolean validating) { this.validating = validating; } /** * Return the XMLReader to be used for parsing the input document. * * FIX ME: there is a bug in JAXP/XERCES that prevent the use of a * parser that contains a schema with a DTD. 
* @exception SAXException if no XMLReader can be instantiated */ public XMLReader getXMLReader() throws SAXException { if (reader == null){ reader = getParser().getXMLReader(); } reader.setDTDHandler(this); reader.setContentHandler(this); reader.setEntityResolver(this); reader.setErrorHandler(this); return reader; } /** * Process notification of character data received from the body of * an XML element. * * @param buffer The characters from the XML document * @param start Starting offset into the buffer * @param length Number of characters from the buffer * * @exception SAXException if a parsing error is to be reported */ public void characters(char buffer[], int start, int length) throws SAXException { if (saxLog.isDebugEnabled()) { saxLog.debug("characters(" + new String(buffer, start, length) + ")"); } bodyText.append(buffer, start, length); } /** * Process notification of the end of the document being reached. * * @exception SAXException if a parsing error is to be reported */ public void endDocument() throws SAXException { if (saxLog.isDebugEnabled()) { if (getCount() > 1) { saxLog.debug("endDocument(): " + getCount() + " elements left"); } else { saxLog.debug("endDocument()"); } } while (getCount() > 1) { pop(); } // Fire "finish" events for all defined rules Iterator rules = getRules().rules().iterator(); while (rules.hasNext()) { Rule rule = (Rule) rules.next(); try { rule.finish(); } catch (Exception e) { log.error("Finish event threw exception", e); throw createSAXException(e); } catch (Error e) { log.error("Finish event threw error", e); throw e; } } // Perform final cleanup clear(); } /** * Process notification of the end of an XML element being reached. * * @param uri - The Namespace URI, or the empty string if the * element has no Namespace URI or if Namespace processing is not * being performed. * @param localName - The local name (without prefix), or the empty * string if Namespace processing is not being performed. 
* @param qName - The qualified XML 1.0 name (with prefix), or the * empty string if qualified names are not available. * @exception SAXException if a parsing error is to be reported */ public void endElement(String namespaceURI, String localName, String qName) throws SAXException { boolean debug = log.isDebugEnabled(); if (debug) { if (saxLog.isDebugEnabled()) { saxLog.debug("endElement(" + namespaceURI + "," + localName + "," + qName + ")"); } log.debug(" match='" + match + "'"); log.debug(" bodyText='" + bodyText + "'"); } // Fire "body" events for all relevant rules List rules = getRules().match(namespaceURI, match); if ((rules != null) && (rules.size() > 0)) { String bodyText = this.bodyText.toString(); for (int i = 0; i < rules.size(); i++) { try { Rule rule = (Rule) rules.get(i); if (debug) { log.debug(" Fire body() for " + rule); } rule.body(bodyText); } catch (Exception e) { log.error("Body event threw exception", e); throw createSAXException(e); } catch (Error e) { log.error("Body event threw error", e); throw e; } } } else { if (debug) { log.debug(" No rules found matching '" + match + "'."); } } // Recover the body text from the surrounding element bodyText = (StringBuffer) bodyTexts.pop(); if (debug) { log.debug(" Popping body text '" + bodyText.toString() + "'"); } // Fire "end" events for all relevant rules in reverse order if (rules != null) { for (int i = 0; i < rules.size(); i++) { int j = (rules.size() - i) - 1; try { Rule rule = (Rule) rules.get(j); if (debug) { log.debug(" Fire end() for " + rule); } rule.end(); } catch (Exception e) { log.error("End event threw exception", e); throw createSAXException(e); } catch (Error e) { log.error("End event threw error", e); throw e; } } } // Recover the previous match expression int slash = match.lastIndexOf('/'); if (slash >= 0) { match = match.substring(0, slash); } else { match = ""; } } /** * Process notification that a namespace prefix is going out of scope. 
* * @param prefix Prefix that is going out of scope * * @exception SAXException if a parsing error is to be reported */ public void endPrefixMapping(String prefix) throws SAXException { if (saxLog.isDebugEnabled()) { saxLog.debug("endPrefixMapping(" + prefix + ")"); } // Deregister this prefix mapping ArrayStack stack = (ArrayStack) namespaces.get(prefix); if (stack == null) { return; } try { stack.pop(); if (stack.empty()) namespaces.remove(prefix); } catch (EmptyStackException e) { throw createSAXException("endPrefixMapping popped too many times"); } } /** * Process notification of ignorable whitespace received from the body of * an XML element. * * @param buffer The characters from the XML document * @param start Starting offset into the buffer * @param length Number of characters from the buffer * * @exception SAXException if a parsing error is to be reported */ public void ignorableWhitespace(char buffer[], int start, int len) throws SAXException { if (saxLog.isDebugEnabled()) { saxLog.debug("ignorableWhitespace(" + new String(buffer, start, len) + ")"); } ; // No processing required } /** * Process notification of a processing instruction that was encountered. * * @param target The processing instruction target * @param data The processing instruction data (if any) * * @exception SAXException if a parsing error is to be reported */ public void processingInstruction(String target, String data) throws SAXException { if (saxLog.isDebugEnabled()) { saxLog.debug("processingInstruction('" + target + "','" + data + "')"); } ; // No processing is required } /** * Set the document locator associated with our parser. * * @param locator The new locator */ public void setDocumentLocator(Locator locator) { if (saxLog.isDebugEnabled()) { saxLog.debug("setDocumentLocator(" + locator + ")"); } this.locator = locator; } /** * Process notification of a skipped entity. 
* * @param name Name of the skipped entity * * @exception SAXException if a parsing error is to be reported */ public void skippedEntity(String name) throws SAXException { if (saxLog.isDebugEnabled()) { saxLog.debug("skippedEntity(" + name + ")"); } ; // No processing required } /** * Process notification of the beginning of the document being reached. * * @exception SAXException if a parsing error is to be reported */ public void startDocument() throws SAXException { if (saxLog.isDebugEnabled()) { saxLog.debug("startDocument()"); } // ensure that the digester is properly configured, as // the digester could be used as a SAX ContentHandler // rather than via the parse() methods. configure(); } /** * Process notification of the start of an XML element being reached. * * @param uri The Namespace URI, or the empty string if the element * has no Namespace URI or if Namespace processing is not being performed. * @param localName The local name (without prefix), or the empty * string if Namespace processing is not being performed. * @param qName The qualified name (with prefix), or the empty * string if qualified names are not available.\ * @param list The attributes attached to the element. If there are * no attributes, it shall be an empty Attributes object. 
* @exception SAXException if a parsing error is to be reported */ public void startElement(String namespaceURI, String localName, String qName, Attributes list) throws SAXException { boolean debug = log.isDebugEnabled(); if (saxLog.isDebugEnabled()) { saxLog.debug("startElement(" + namespaceURI + "," + localName + "," + qName + ")"); } // Save the body text accumulated for our surrounding element bodyTexts.push(bodyText); if (debug) { log.debug(" Pushing body text '" + bodyText.toString() + "'"); } bodyText = new StringBuffer(); // Compute the current matching rule StringBuffer sb = new StringBuffer(match); if (match.length() > 0) { sb.append('/'); } if ((localName == null) || (localName.length() < 1)) { sb.append(qName); } else { sb.append(localName); } match = sb.toString(); if (debug) { log.debug(" New match='" + match + "'"); } // Fire "begin" events for all relevant rules List rules = getRules().match(namespaceURI, match); if ((rules != null) && (rules.size() > 0)) { String bodyText = this.bodyText.toString(); for (int i = 0; i < rules.size(); i++) { try { Rule rule = (Rule) rules.get(i); if (debug) { log.debug(" Fire begin() for " + rule); } rule.begin(list); } catch (Exception e) { log.error("Begin event threw exception", e); throw createSAXException(e); } catch (Error e) { log.error("Begin event threw error", e); throw e; } } } else { if (debug) { log.debug(" No rules found matching '" + match + "'."); } } } /** * Process notification that a namespace prefix is coming in to scope. 
* * @param prefix Prefix that is being declared * @param namespaceURI Corresponding namespace URI being mapped to * * @exception SAXException if a parsing error is to be reported */ public void startPrefixMapping(String prefix, String namespaceURI) throws SAXException { if (saxLog.isDebugEnabled()) { saxLog.debug("startPrefixMapping(" + prefix + "," + namespaceURI + ")"); } // Register this prefix mapping ArrayStack stack = (ArrayStack) namespaces.get(prefix); if (stack == null) { stack = new ArrayStack(); namespaces.put(prefix, stack); } stack.push(namespaceURI); } /** * Receive notification of a notation declaration event. * * @param name The notation name * @param publicId The public identifier (if any) * @param systemId The system identifier (if any) */ public void notationDecl(String name, String publicId, String systemId) { if (saxLog.isDebugEnabled()) { saxLog.debug("notationDecl(" + name + "," + publicId + "," + systemId + ")"); } } /** * Receive notification of an unparsed entity declaration event. * * @param name The unparsed entity name * @param publicId The public identifier (if any) * @param systemId The system identifier (if any) * @param notation The name of the associated notation */ public void unparsedEntityDecl(String name, String publicId, String systemId, String notation) { if (saxLog.isDebugEnabled()) { saxLog.debug("unparsedEntityDecl(" + name + "," + publicId + "," + systemId + "," + notation + ")"); } } /** * Resolve the requested external entity. 
* * @param publicId The public identifier of the entity being referenced * @param systemId The system identifier of the entity being referenced * * @exception SAXException if a parsing exception occurs */ public InputSource resolveEntity(String publicId, String systemId) throws SAXException { boolean debug = log.isDebugEnabled(); if (saxLog.isDebugEnabled()) { saxLog.debug("resolveEntity('" + publicId + "', '" + systemId + "')"); } this.publicId = publicId; // Has this system identifier been registered? String entityURL = null; if (publicId != null) { entityURL = (String) entityValidator.get(publicId); } // Redirect the schema/dtd location to a local destination. if (schemaLocation != null && entityValidator != null && entityURL == null){ try { String schemaName = null; String localURI = null; try{ schemaName = systemId.substring(systemId.lastIndexOf("/") + 1); localURI = (String)entityValidator.get(schemaName); } catch(IndexOutOfBoundsException ex){ if (debug) { log.debug(" Not registered, use system identifier"); } return null; } if ( localURI == null ){ if (debug) { log.debug(" Not registered, use system identifier"); } return null; } return new InputSource(localURI); } catch (Exception e) { throw createSAXException(e); } } // Return an input source to our alternative URL if (debug) { log.debug(" Resolving to alternate DTD '" + entityURL + "'"); } try { URL url = new URL(entityURL); InputStream stream = url.openStream(); return (new InputSource(stream)); } catch (Exception e) { throw createSAXException(e); } } /** * Forward notification of a parsing error to the application supplied * error handler (if any). 
* * @param exception The error information * * @exception SAXException if a parsing exception occurs */ public void error(SAXParseException exception) throws SAXException { log.error("Parse Error at line " + exception.getLineNumber() + " column " + exception.getColumnNumber() + ": " + exception.getMessage(), exception); if (errorHandler != null) { errorHandler.error(exception); } } /** * Forward notification of a fatal parsing error to the application * supplied error handler (if any). * * @param exception The fatal error information * * @exception SAXException if a parsing exception occurs */ public void fatalError(SAXParseException exception) throws SAXException { log.error("Parse Fatal Error at line " + exception.getLineNumber() + " column " + exception.getColumnNumber() + ": " + exception.getMessage(), exception); if (errorHandler != null) { errorHandler.fatalError(exception); } } /** * Forward notification of a parse warning to the application supplied * error handler (if any). * * @param exception The warning information * * @exception SAXException if a parsing exception occurs */ public void warning(SAXParseException exception) throws SAXException { log.error("Parse Warning at line " + exception.getLineNumber() + " column " + exception.getColumnNumber() + ": " + exception.getMessage(), exception); if (errorHandler != null) { errorHandler.warning(exception); } } /** * Log a message to our associated logger. * * @param message The message to be logged * @deprecated Call getLogger() and use it's logging methods */ public void log(String message) { log.info(message); } /** * Log a message and exception to our associated logger. * * @param message The message to be logged * @deprecated Call getLogger() and use it's logging methods */ public void log(String message, Throwable exception) { log.error(message, exception); } /** * Parse the content of the specified file using this Digester. Returns * the root element from the object stack (if any). 
* * @param file File containing the XML data to be parsed * * @exception IOException if an input/output error occurs * @exception SAXException if a parsing exception occurs */ public Object parse(File file) throws IOException, SAXException { configure(); InputSource input = new InputSource(new FileInputStream(file)); input.setSystemId("file://" + file.getAbsolutePath()); getXMLReader().parse(input); return (root); } /** * Parse the content of the specified input source using this Digester. * Returns the root element from the object stack (if any). * * @param input Input source containing the XML data to be parsed * * @exception IOException if an input/output error occurs * @exception SAXException if a parsing exception occurs */ public Object parse(InputSource input) throws IOException, SAXException { configure(); getXMLReader().parse(input); return (root); } /** * Parse the content of the specified input stream using this Digester. * Returns the root element from the object stack (if any). * * @param input Input stream containing the XML data to be parsed * * @exception IOException if an input/output error occurs * @exception SAXException if a parsing exception occurs */ public Object parse(InputStream input) throws IOException, SAXException { configure(); InputSource is = new InputSource(input); getXMLReader().parse(is); return (root); } /** * Parse the content of the specified reader using this Digester. * Returns the root element from the object stack (if any). * * @param reader Reader containing the XML data to be parsed * * @exception IOException if an input/output error occurs * @exception SAXException if a parsing exception occurs */ public Object parse(Reader reader) throws IOException, SAXException { configure(); InputSource is = new InputSource(reader); getXMLReader().parse(is); return (root); } /** * Parse the content of the specified URI using this Digester. * Returns the root element from the object stack (if any). 
* * @param uri URI containing the XML data to be parsed * * @exception IOException if an input/output error occurs * @exception SAXException if a parsing exception occurs */ public Object parse(String uri) throws IOException, SAXException { configure(); InputSource is = new InputSource(uri); getXMLReader().parse(is); return (root); } /** * Register the specified DTD URL for the specified public identifier. * This must be called before the first call to <code>parse()</code>. * * @param publicId Public identifier of the DTD to be resolved * @param entityURL The URL to use for reading this DTD */ public void register(String publicId, String entityURL) { if (log.isDebugEnabled()) { log.debug("register('" + publicId + "', '" + entityURL + "'"); } entityValidator.put(publicId, entityURL); } /** * <p>Register a new Rule matching the specified pattern. * This method sets the <code>Digester</code> property on the rule.</p> * * @param pattern Element matching pattern * @param rule Rule to be registered */ public void addRule(String pattern, Rule rule) { rule.setDigester(this); getRules().add(pattern, rule); } /** * Register a set of Rule instances defined in a RuleSet. * * @param ruleSet The RuleSet instance to configure from */ public void addRuleSet(RuleSet ruleSet) { String oldNamespaceURI = getRuleNamespaceURI(); String newNamespaceURI = ruleSet.getNamespaceURI(); if (log.isDebugEnabled()) { if (newNamespaceURI == null) { log.debug("addRuleSet() with no namespace URI"); } else { log.debug("addRuleSet() with namespace URI " + newNamespaceURI); } } setRuleNamespaceURI(newNamespaceURI); ruleSet.addRuleInstances(this); setRuleNamespaceURI(oldNamespaceURI); } /** * Add a "bean property setter" rule for the specified parameters. * * @param pattern Element matching pattern */ public void addBeanPropertySetter(String pattern) { addRule(pattern, new BeanPropertySetterRule()); } /** * Add a "bean property setter" rule for the specified parameters. 
* * @param pattern Element matching pattern * @param propertyName Name of property to set */ public void addBeanPropertySetter(String pattern, String propertyName) { addRule(pattern, new BeanPropertySetterRule(propertyName)); } /** * Add an "call method" rule for a method which accepts no arguments. * * @param pattern Element matching pattern * @param methodName Method name to be called */ public void addCallMethod(String pattern, String methodName) { addRule( pattern, new CallMethodRule(methodName)); } /** * Add an "call method" rule for the specified parameters. * * @param pattern Element matching pattern * @param methodName Method name to be called * @param paramCount Number of expected parameters (or zero * for a single parameter from the body of this element) */ public void addCallMethod(String pattern, String methodName, int paramCount) { addRule(pattern, new CallMethodRule(methodName, paramCount)); } /** * Add an "call method" rule for the specified parameters. * If <code>paramCount</code> is set to zero the rule will use * the body of the matched element as the single argument of the * method, unless <code>paramTypes</code> is null or empty, in this * case the rule will call the specified method with no arguments. * * @param pattern Element matching pattern * @param methodName Method name to be called * @param paramCount Number of expected parameters (or zero * for a single parameter from the body of this element) * @param paramTypes Set of Java class names for the types * of the expected parameters * (if you wish to use a primitive type, specify the corresonding * Java wrapper class instead, such as <code>java.lang.Boolean</code> * for a <code>boolean</code> parameter) */ public void addCallMethod(String pattern, String methodName, int paramCount, String paramTypes[]) { addRule(pattern, new CallMethodRule( methodName, paramCount, paramTypes)); } /** * Add an "call method" rule for the specified parameters. 
* If <code>paramCount</code> is set to zero the rule will use * the body of the matched element as the single argument of the * method, unless <code>paramTypes</code> is null or empty, in this * case the rule will call the specified method with no arguments. * * @param pattern Element matching pattern * @param methodName Method name to be called * @param paramCount Number of expected parameters (or zero * for a single parameter from the body of this element) * @param paramTypes The Java class names of the arguments * (if you wish to use a primitive type, specify the corresonding * Java wrapper class instead, such as <code>java.lang.Boolean</code> * for a <code>boolean</code> parameter) */ public void addCallMethod(String pattern, String methodName, int paramCount, Class paramTypes[]) { addRule(pattern, new CallMethodRule( methodName, paramCount, paramTypes)); } /** * Add a "call parameter" rule for the specified parameters. * * @param pattern Element matching pattern * @param paramIndex Zero-relative parameter index to set * (from the body of this element) */ public void addCallParam(String pattern, int paramIndex) { addRule(pattern, new CallParamRule(paramIndex)); } /** * Add a "call parameter" rule for the specified parameters. * * @param pattern Element matching pattern * @param paramIndex Zero-relative parameter index to set * (from the specified attribute) * @param attributeName Attribute whose value is used as the * parameter value */ public void addCallParam(String pattern, int paramIndex, String attributeName) { addRule(pattern, new CallParamRule(paramIndex, attributeName)); } /** * Add a "factory create" rule for the specified parameters. * * @param pattern Element matching pattern * @param className Java class name of the object creation factory class */ public void addFactoryCreate(String pattern, String className) { addRule(pattern, new FactoryCreateRule(className)); } /** * Add a "factory create" rule for the specified parameters. 
* * @param pattern Element matching pattern * @param clazz Java class of the object creation factory class */ public void addFactoryCreate(String pattern, Class clazz) { addRule(pattern, new FactoryCreateRule(clazz)); } /** * Add a "factory create" rule for the specified parameters. * * @param pattern Element matching pattern * @param className Java class name of the object creation factory class * @param attributeName Attribute name which, if present, overrides the * value specified by <code>className</code> */ public void addFactoryCreate(String pattern, String className, String attributeName) { addRule(pattern, new FactoryCreateRule(className, attributeName)); } /** * Add a "factory create" rule for the specified parameters. * * @param pattern Element matching pattern * @param clazz Java class of the object creation factory class * @param attributeName Attribute name which, if present, overrides the * value specified by <code>className</code> */ public void addFactoryCreate(String pattern, Class clazz, String attributeName) { addRule(pattern, new FactoryCreateRule(clazz, attributeName)); } /** * Add a "factory create" rule for the specified parameters. * * @param pattern Element matching pattern * @param creationFactory Previously instantiated ObjectCreationFactory * to be utilized */ public void addFactoryCreate(String pattern, ObjectCreationFactory creationFactory) { creationFactory.setDigester(this); addRule(pattern, new FactoryCreateRule(creationFactory)); } /** * Add an "object create" rule for the specified parameters. * * @param pattern Element matching pattern * @param className Java class name to be created */ public void addObjectCreate(String pattern, String className) { addRule(pattern, new ObjectCreateRule(className)); } /** * Add an "object create" rule for the specified parameters. 
* * @param pattern Element matching pattern * @param clazz Java class to be created */ public void addObjectCreate(String pattern, Class clazz) { addRule(pattern, new ObjectCreateRule(clazz)); } /** * Add an "object create" rule for the specified parameters. * * @param pattern Element matching pattern * @param className Default Java class name to be created * @param attributeName Attribute name that optionally overrides * the default Java class name to be created */ public void addObjectCreate(String pattern, String className, String attributeName) { addRule(pattern, new ObjectCreateRule(className, attributeName)); } /** * Add an "object create" rule for the specified parameters. * * @param pattern Element matching pattern * @param attributeName Attribute name that optionally overrides * @param clazz Default Java class to be created * the default Java class name to be created */ public void addObjectCreate(String pattern, String attributeName, Class clazz) { addRule(pattern, new ObjectCreateRule(attributeName, clazz)); } /** * Add a "set next" rule for the specified parameters. * * @param pattern Element matching pattern * @param methodName Method name to call on the parent element */ public void addSetNext(String pattern, String methodName) { addRule(pattern, new SetNextRule(methodName)); } /** * Add a "set next" rule for the specified parameters. * * @param pattern Element matching pattern * @param methodName Method name to call on the parent element * @param paramType Java class name of the expected parameter type * (if you wish to use a primitive type, specify the corresonding * Java wrapper class instead, such as <code>java.lang.Boolean</code> * for a <code>boolean</code> parameter) */ public void addSetNext(String pattern, String methodName, String paramType) { addRule(pattern, new SetNextRule(methodName, paramType)); } /** * Add {@link SetRootRule} with the specified parameters. 
* * @param pattern Element matching pattern * @param methodName Method name to call on the root object */ public void addSetRoot(String pattern, String methodName) { addRule(pattern, new SetRootRule(methodName)); } /** * Add {@link SetRootRule} with the specified parameters. * * @param pattern Element matching pattern * @param methodName Method name to call on the root object * @param paramType Java class name of the expected parameter type */ public void addSetRoot(String pattern, String methodName, String paramType) { addRule(pattern, new SetRootRule(methodName, paramType)); } /** * Add a "set properties" rule for the specified parameters. * * @param pattern Element matching pattern */ public void addSetProperties(String pattern) { addRule(pattern, new SetPropertiesRule()); } /** * Add a "set properties" rule with a single overridden parameter. * See {@link SetPropertiesRule#SetPropertiesRule(String attributeName, String propertyName)} * * @param pattern Element matching pattern * @param attributeName map this attribute * @param propertyNames to this property */ public void addSetProperties( String pattern, String attributeName, String propertyName) { addRule(pattern, new SetPropertiesRule(attributeName, propertyName)); } /** * Add a "set properties" rule with overridden parameters. * See {@link SetPropertiesRule#SetPropertiesRule(String [] attributeNames, String [] propertyNames)} * * @param pattern Element matching pattern * @param attributeNames names of attributes with custom mappings * @param propertyNames property names these attributes map to */ public void addSetProperties( String pattern, String [] attributeNames, String [] propertyNames) { addRule(pattern, new SetPropertiesRule(attributeNames, propertyNames)); } /** * Add a "set property" rule for the specified parameters. 
* * @param pattern Element matching pattern * @param name Attribute name containing the property name to be set * @param value Attribute name containing the property value to set */ public void addSetProperty(String pattern, String name, String value) { addRule(pattern, new SetPropertyRule(name, value)); } /** * Add a "set top" rule for the specified parameters. * * @param pattern Element matching pattern * @param methodName Method name to call on the parent element */ public void addSetTop(String pattern, String methodName) { addRule(pattern, new SetTopRule(methodName)); } /** * Add a "set top" rule for the specified parameters. * * @param pattern Element matching pattern * @param methodName Method name to call on the parent element * @param paramType Java class name of the expected parameter type * (if you wish to use a primitive type, specify the corresonding * Java wrapper class instead, such as <code>java.lang.Boolean</code> * for a <code>boolean</code> parameter) */ public void addSetTop(String pattern, String methodName, String paramType) { addRule(pattern, new SetTopRule(methodName, paramType)); } /** * Clear the current contents of the object stack. */ public void clear() { match = ""; bodyTexts.clear(); params.clear(); publicId = null; stack.clear(); } /** * Return the top object on the stack without removing it. If there are * no objects on the stack, return <code>null</code>. */ public Object peek() { try { return (stack.peek()); } catch (EmptyStackException e) { log.warn("Empty stack (returning null)"); return (null); } } /** * Return the n'th object down the stack, where 0 is the top element * and [getCount()-1] is the bottom element. If the specified index * is out of range, return <code>null</code>. * * @param n Index of the desired element, where 0 is the top of the stack, * 1 is the next element down, and so on. 
*/ public Object peek(int n) { try { return (stack.peek(n)); } catch (EmptyStackException e) { log.warn("Empty stack (returning null)"); return (null); } } /** * Pop the top object off of the stack, and return it. If there are * no objects on the stack, return <code>null</code>. */ public Object pop() { try { return (stack.pop()); } catch (EmptyStackException e) { log.warn("Empty stack (returning null)"); return (null); } } /** * Push a new object onto the top of the object stack. * * @param object The new object */ public void push(Object object) { if (stack.size() == 0) { root = object; } stack.push(object); } /** * When the Digester is being used as a SAXContentHandler, * this method allows you to access the root object that has been * created after parsing. * * @return the root object that has been created after parsing * or null if the digester has not parsed any XML yet. */ public Object getRoot() { return root; } /** * Provide a hook for lazy configuration of this <code>Digester</code> * instance. The default implementation does nothing, but subclasses * can override as needed. */ protected void configure() { // Do not configure more than once if (configured) { return; } // Perform lazy configuration as needed ; // Nothing required by default // Set the configuration flag to avoid repeating configured = true; } /** * Return the set of DTD URL registrations, keyed by public identifier. */ Map getRegistrations() { return (entityValidator); } /** * Return the set of rules that apply to the specified match position. * The selected rules are those that match exactly, or those rules * that specify a suffix match and the tail of the rule matches the * current match position. Exact matches have precedence over * suffix matches, then (among suffix matches) the longest match * is preferred. 
* * @param match The current match position * * @deprecated Call <code>match()</code> on the <code>Rules</code> * implementation returned by <code>getRules()</code> */ List getRules(String match) { return (getRules().match(match)); } /** * <p>Return the top object on the parameters stack without removing it. If there are * no objects on the stack, return <code>null</code>.</p> * * <p>The parameters stack is used to store <code>CallMethodRule</code> parameters. * See {@link #params}.</p> */ Object peekParams() { try { return (params.peek()); } catch (EmptyStackException e) { log.warn("Empty stack (returning null)"); return (null); } } /** * <p>Return the n'th object down the parameters stack, where 0 is the top element * and [getCount()-1] is the bottom element. If the specified index * is out of range, return <code>null</code>.</p> * * <p>The parameters stack is used to store <code>CallMethodRule</code> parameters. * See {@link #params}.</p> * * @param n Index of the desired element, where 0 is the top of the stack, * 1 is the next element down, and so on. */ Object peekParams(int n) { try { return (params.peek(n)); } catch (EmptyStackException e) { log.warn("Empty stack (returning null)"); return (null); } } /** * <p>Pop the top object off of the parameters stack, and return it. If there are * no objects on the stack, return <code>null</code>.</p> * * <p>The parameters stack is used to store <code>CallMethodRule</code> parameters. * See {@link #params}.</p> */ Object popParams() { try { if (log.isTraceEnabled()) { log.trace("Popping params"); } return (params.pop()); } catch (EmptyStackException e) { log.warn("Empty stack (returning null)"); return (null); } } /** * <p>Push a new object onto the top of the parameters stack.</p> * * <p>The parameters stack is used to store <code>CallMethodRule</code> parameters. 
* See {@link #params}.</p> * * @param object The new object */ void pushParams(Object object) { if (log.isTraceEnabled()) { log.trace("Pushing params"); } params.push(object); } /** * Create a SAX exception which also understands about the location in * the digester file where the exception occurs * * @return the new exception */ protected SAXException createSAXException(String message, Exception e) { if (locator != null) { String error = "Error at (" + locator.getLineNumber() + ", " + locator.getColumnNumber() + ": " + message; if (e != null) { return new SAXParseException(error, locator, e); } else { return new SAXParseException(error, locator); } } log.error("No Locator!"); if (e != null) { return new SAXException(message, e); } else { return new SAXException(message); } } /** * Create a SAX exception which also understands about the location in * the digester file where the exception occurs * * @return the new exception */ protected SAXException createSAXException(Exception e) { return createSAXException(e.getMessage(), e); } /** * Create a SAX exception which also understands about the location in * the digester file where the exception occurs * * @return the new exception */ protected SAXException createSAXException(String message) { return createSAXException(message, null); } }
package builders;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.apache.commons.lang3.StringUtils;

import eAdapter.Document;
import eAdapter.Representative;

/**
 *
 * @author Jeff Gillispie
 * @version December 2016
 *
 * Purpose: Builds documents from a text delimited file.
 */
public class TextDelimitedBuilder {

    /**
     * Builds a list of documents.
     *
     * @param lines the lines parsed from a text delimited file
     * @param hasHeader indicates if the first line is a header;
     *  if there is no header, arbitrary column names are assigned
     *  in the format "Column 1, Column 2, ..."
     * @param keyColumnName the name of the column that contains the key;
     *  if no header exists the key must be in the first column
     * @param parentColumnName the name of the column that contains the parent key or blank if none
     * @param childColumnName the name of the column that contains the child key or blank if none
     * @param childColumnDelimiter the delimiter used to split child key values (";" if blank)
     * @param repSettings representative settings
     * @return returns a list of documents
     * @throws RuntimeException if family relationships are inconsistent
     */
    public List<Document> buildDocuments(List<String[]> lines, boolean hasHeader, String keyColumnName,
            String parentColumnName, String childColumnName, String childColumnDelimiter,
            List<RepresentativeSetting> repSettings) {
        // setup for building
        String[] header = getHeader(lines.get(0), hasHeader);
        Map<String, Document> docs = new LinkedHashMap<>();
        Map<String, Document> paternity = new HashMap<>(); // childKey >> parentDoc
        String childSeparator = StringUtils.defaultIfBlank(childColumnDelimiter, ";");
        // BUG FIX: the original looped with "if (hasHeader) continue;", which skipped
        // EVERY line (not just the header) whenever a header was present. Only the
        // first line is the header, so start from index 1 in that case.
        int firstDataLine = hasHeader ? 1 : 0;
        for (int i = firstDataLine; i < lines.size(); i++) {
            // build a document
            // BUG FIX: the original called an undefined method "build(...)";
            // the implemented method is buildDocument(...).
            Document doc = buildDocument(lines.get(i), header, keyColumnName, repSettings);
            // set the parent and child values
            settleFamilyDrama(parentColumnName, childColumnName, childSeparator, doc, docs, paternity);
            // add the document to the collection
            docs.put(doc.getKey(), doc);
        }
        // check for children that have disowned their parent
        // this can only be known after all children have been imported
        if (paternity.size() > 0) {
            throw new RuntimeException("Broken families, children have disowned their parent.");
        }
        return new ArrayList<>(docs.values());
    }

    /**
     * Builds a document.
     *
     * @param line a text delimited line representing a document
     * @param header the header which contains the ordered field names
     * @param keyColumnName the name of the key column; if blank the first column is used as the key
     * @param representativeSettings representative settings, or null for none
     * @return returns a document
     * @throws RuntimeException if the value count does not match the header size
     */
    public Document buildDocument(String[] line, String[] header, String keyColumnName,
            List<RepresentativeSetting> representativeSettings) {
        // setup for building
        Document document = new Document();
        // check value size matches the header size
        // BUG FIX: the original threw when the sizes DID match (==); the error
        // condition is a mismatch (!=).
        if (header.length != line.length) {
            throw new RuntimeException("The value size does not match the header size.");
        }
        // populate metadata
        for (int i = 0; i < line.length; i++) {
            String fieldName = header[i];
            String value = line[i];
            document.addField(fieldName, value);
        }
        // populate key; if there is no key column name the value in the first column
        // is expected to be the key
        String keyValue = (!StringUtils.isBlank(keyColumnName))
                ? document.getMetadata().get(keyColumnName)
                : document.getMetadata().get(header[0]);
        document.setKey(keyValue);
        // populate representatives
        if (representativeSettings != null) {
            // setup for populating representatives
            Set<Representative> reps = new LinkedHashSet<>();
            for (RepresentativeSetting setting : representativeSettings) {
                // setup for creating rep properties
                Representative rep = new Representative();
                Set<String> files = new LinkedHashSet<>();
                // this format will only have one file per rep
                String file = document.getMetadata().get(setting.getColumn());
                files.add(file);
                // set rep values
                rep.setType(setting.getType());
                rep.setName(setting.getName());
                rep.setFiles(files);
                reps.add(rep); // add rep to the collection
            }
            document.setRepresentatives(reps);
        }
        // return built document
        return document;
    }

    /**
     * Resolves parent/child relationships for a newly built document and
     * records pending relationships in the paternity map (childKey >> parentDoc)
     * so they can be validated once all documents are loaded.
     */
    private void settleFamilyDrama(String parentColumnName, String childColumnName, String childSeparator,
            Document doc, Map<String, Document> docs, Map<String, Document> paternity) {
        if (StringUtils.isNotBlank(parentColumnName)) {
            // if we have a parent column name
            String parentKey = doc.getMetadata().get(parentColumnName);
            // check that the parentKey doesn't refer to itself
            if (parentKey.equals(doc.getKey()) || StringUtils.isBlank(parentKey)) {
                // the parentid value refers to itself or there is no parent
                // do nothing here
            } else {
                Document parent = docs.get(parentKey);
                // check if there is no parent
                if (parent == null) {
                    throw new RuntimeException("Broken families, the parent is missing.");
                } else {
                    // a parent exists
                    setRelationships(doc, parent);
                    // validate relationships if both parent & child fields exist
                    if (StringUtils.isNotBlank(childColumnName)) {
                        // log paternity so we can check for children who disown their parent
                        String childrenLine = doc.getMetadata().get(childColumnName);
                        if (StringUtils.isNotBlank(childrenLine)) {
                            String[] childKeys = childrenLine.split(childSeparator);
                            // the child docs haven't been added yet so we'll record the
                            // relationships and add them later
                            for (String childKey : childKeys) {
                                paternity.put(childKey, doc); // paternity maps childKey >> parentDoc
                            }
                        }
                        // check for relationships that are not reciprocal
                        // BUG FIX: the original checked contains(parentKey), i.e. whether the
                        // parent listed ITSELF as a child; the reciprocity check is whether the
                        // parent's child field lists THIS document's key.
                        if (!parent.getMetadata().get(childColumnName).contains(doc.getKey())) {
                            throw new RuntimeException("Broken families, the parent disowns a child document.");
                        } else {
                            // the relationship is reciprocal
                            // we'll check for orphans later
                            paternity.remove(doc.getKey());
                        }
                    }
                }
            }
        } else if (StringUtils.isNotBlank(childColumnName)) {
            // if we don't have a parent column name but we have a child column name
            String childrenLine = doc.getMetadata().get(childColumnName);
            if (StringUtils.isBlank(childrenLine)) {
                // no childrenLine
                // do nothing here
            } else {
                String[] childKeys = childrenLine.split(childSeparator);
                // the child docs haven't been added yet so we'll record the relationship
                // and add them later
                for (String childKey : childKeys) {
                    paternity.put(childKey, doc); // paternity maps childKey >> parentDoc
                }
                // now check for the paternity of this document and add the parent
                // paternity maps childKey >> parentDoc
                if (paternity.containsKey(doc.getKey())) {
                    Document parent = paternity.get(doc.getKey()); // note: the parent doc has already been confirmed
                    setRelationships(doc, parent);
                    paternity.remove(doc.getKey()); // needs to be removed for the disowned parent check
                }
            }
        } else {
            // no family data
            // do nothing here
        }
    }

    /**
     * Links a child document to its parent and registers the child in the
     * parent's children collection.
     */
    private void setRelationships(Document doc, Document parent) {
        doc.setParent(parent);
        // now add this document as a child to the parent
        List<Document> children = parent.getChildren();
        children.add(doc);
        parent.setChildren(children);
    }

    /**
     * Gets the header values.
     *
     * @param headerValues the ordered column names or a line from the text delimited file
     * @param hasHeader indicates if the headerValues are the header;
     *  if they are not, the columns are given arbitrary names
     *  in the following format "Column 1, Column 2, Column 3, ..."
     * @return returns the ordered column names
     */
    public String[] getHeader(String[] headerValues, boolean hasHeader) {
        String[] header = new String[headerValues.length];
        // check if the supplied values are the header
        if (hasHeader) {
            header = headerValues;
        } else {
            // create arbitrary column names
            // BUG FIX: names are documented as starting at "Column 1";
            // the original produced "Column 0, Column 1, ...".
            for (int i = 0; i < headerValues.length; i++) {
                header[i] = "Column " + (i + 1);
            }
        }
        return header;
    }
}
package com.akiban.server;

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;

import com.akiban.ais.model.AkibanInformationSchema;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.akiban.ais.model.Column;
import com.akiban.ais.model.GroupTable;
import com.akiban.ais.model.Index;
import com.akiban.ais.model.IndexColumn;
import com.akiban.ais.model.IndexName;
import com.akiban.ais.model.Join;
import com.akiban.ais.model.JoinColumn;
import com.akiban.ais.model.Table;
import com.akiban.ais.model.UserTable;
import com.akiban.server.service.session.Session;
import com.akiban.server.store.SchemaManager;
import com.akiban.server.util.RowDefNotFoundException;
import com.persistit.exception.PersistitException;

/**
 * Caches RowDef instances. In this incarnation, this class also constructs
 * RowDef objects from the AkibanInformationSchema. The translation is done in
 * the {@link #setAIS(AkibanInformationSchema)} method.
 *
 * @author peter
 */
public class RowDefCache {

    // TODO: For debugging - remove this
    private static volatile RowDefCache LATEST;

    private static final Logger LOG = LoggerFactory.getLogger(RowDefCache.class.getName());

    // rowDefId -> RowDef
    private final Map<Integer, RowDef> cacheMap = new TreeMap<Integer, RowDef>();

    // "schema.table" -> rowDefId
    private final Map<String, Integer> nameMap = new TreeMap<String, Integer>();

    // Snapshot of cacheMap.hashCode() taken when the AIS was installed.
    private int hashCode;

    {
        LATEST = this;
    }

    public static RowDefCache latest() {
        return LATEST;
    }

    public synchronized boolean contains(final int rowDefId) {
        return cacheMap.containsKey(Integer.valueOf(rowDefId));
    }

    /**
     * Look up and return a RowDef for a supplied rowDefId value.
     *
     * @param rowDefId
     * @return the corresponding RowDef
     * @throws RowDefNotFoundException if there is no such RowDef.
     */
    public synchronized RowDef getRowDef(final int rowDefId) throws RowDefNotFoundException {
        RowDef rowDef = rowDef(rowDefId);
        if (rowDef == null) {
            throw new RowDefNotFoundException(rowDefId);
        }
        return rowDef;
    }

    /**
     * @param rowDefId
     * @return the corresponding RowDef object, or <code>null</code> if
     *         there is no RowDef defined with the specified id
     */
    public synchronized RowDef rowDef(final int rowDefId) {
        return cacheMap.get(Integer.valueOf(rowDefId));
    }

    public synchronized List<RowDef> getRowDefs() {
        return new ArrayList<RowDef>(cacheMap.values());
    }

    /**
     * Look up a RowDef by its qualified name (see {@link #nameOf(String, String)}).
     * Returns <code>null</code> when the name is unknown.
     */
    public synchronized RowDef getRowDef(final String tableName) throws RowDefNotFoundException {
        final Integer key = nameMap.get(tableName);
        if (key == null) {
            return null;
        }
        return getRowDef(key.intValue());
    }

    /**
     * Given a schema and table name, gets a string that uniquely identifies a
     * table. This string can then be passed to {@link #getRowDef(String)}.
     *
     * @param schema
     *            the schema
     * @param table
     *            the table name
     * @return a unique form
     */
    public static String nameOf(String schema, String table) {
        assert schema != null;
        assert table != null;
        return schema + "." + table;
    }

    public synchronized void clear() {
        cacheMap.clear();
        nameMap.clear();
        hashCode = 0;
    }

    /**
     * Receive an instance of the AkibanInformationSchema, crack it and produce
     * the RowDef instances it defines.
     *
     * @param ais
     */
    public synchronized void setAIS(final AkibanInformationSchema ais) {
        for (final UserTable table : ais.getUserTables().values()) {
            putRowDef(createUserTableRowDef(ais, table));
        }
        for (final GroupTable table : ais.getGroupTables().values()) {
            putRowDef(createGroupTableRowDef(ais, table));
        }
        analyzeAll();
        if (LOG.isDebugEnabled()) {
            LOG.debug(toString());
        }
        hashCode = cacheMap.hashCode();
    }

    /**
     * Assign "ordinal" values to user table RowDef instances. An ordinal is the
     * integer used to identify a user table subtree within an hkey. This method
     * assigns unique integers where needed to any tables that have not already
     * received non-zero ordinal values. Once a table is populated, its ordinal
     * is written as part of the TableStatus record, and on subsequent server
     * start-ups, that value is loaded and reused from the status tree.
     *
     * Consequently it is necessary to invoke
     * {@link SchemaManager#loadTableStatusRecords(Session)} before this method
     * is called; otherwise the wrong ordinal values are likely to be assigned.
     * This sequence is validated by asserting that the TableStatus whose
     * ordinal is to be assigned may not be "dirty". A newly constructed
     * TableStatus is dirty; one that has been validated through the
     * loadTableStatusRecords method is not dirty.
     *
     * @param schemaManager
     * @throws PersistitException
     */
    public synchronized void fixUpOrdinals(SchemaManager schemaManager) throws PersistitException {
        for (final RowDef groupRowDef : getRowDefs()) {
            if (groupRowDef.isGroupTable()) {
                // groupTable has no ordinal
                final HashSet<Integer> assigned = new HashSet<Integer>();
                // First pass: merge already assigned values
                for (final RowDef userRowDef : groupRowDef.getUserTableRowDefs()) {
                    final TableStatus tableStatus = userRowDef.getTableStatus();
                    // Ensure that the loadTableStatusRecords method was called
                    // before this.
                    // BUG FIX: the original asserted !tableStatus.isDirty() BEFORE
                    // its own null check, dereferencing a possibly-null status.
                    assert tableStatus == null || !tableStatus.isDirty();
                    int ordinal = tableStatus == null ? 0 : tableStatus.getOrdinal();
                    if (ordinal != 0
                            && userRowDef.getOrdinal() != 0
                            && tableStatus.getOrdinal() != userRowDef.getOrdinal()) {
                        throw new IllegalStateException(String.format(
                                "Mismatched ordinals: %s and %s",
                                userRowDef.getOrdinal(), tableStatus.getOrdinal()));
                    }
                    if (ordinal != 0) {
                        userRowDef.setOrdinal(ordinal);
                    } else if (tableStatus != null && userRowDef.getOrdinal() != 0
                            && tableStatus.getOrdinal() == 0) {
                        // BUG FIX: guarded with tableStatus != null; this branch
                        // previously NPEd when the status was absent.
                        ordinal = userRowDef.getOrdinal();
                        tableStatus.setOrdinal(ordinal);
                    }
                    if (ordinal != 0 && !assigned.add(ordinal)) {
                        throw new IllegalStateException(String.format(
                                "Non-unique ordinal value %s added to %s", ordinal, assigned));
                    }
                }
                // Second pass: hand out fresh ordinals to tables still at zero.
                int nextOrdinal = 1;
                for (final RowDef userRowDef : groupRowDef.getUserTableRowDefs()) {
                    if (userRowDef.getOrdinal() == 0) {
                        // find an unassigned value. Here we could try to
                        // optimize layout by assigning "bushy" values in some
                        // optimal pattern (if we knew what that was...)
                        for (; assigned.contains(nextOrdinal); nextOrdinal++) {
                        }
                        userRowDef.setOrdinal(nextOrdinal);
                        assigned.add(nextOrdinal);
                    }
                }
                if (assigned.size() != groupRowDef.getUserTableRowDefs().length) {
                    throw new IllegalStateException(String.format(
                            "Inconsistent ordinal number assignments: %s", assigned));
                }
            }
        }
    }

    private static String getTreeName(GroupTable groupTable) {
        return groupTable.getName().toString();
    }

    private static String getTreeName(String groupName, Index index) {
        IndexName iname = index.getIndexName();
        String schemaName = iname.getSchemaName();
        String tableName = iname.getTableName();
        String indexName = iname.getName();
        // Tree names for identical indexes on the group and user table must match.
        // Check if this index originally came from a user table and, if so, use their
        // names instead.
        if (index.getTable().isGroupTable()) {
            Column c = index.getColumns().get(0).getColumn().getUserColumn();
            if (c != null) {
                UserTable table = c.getUserTable();
                for (Index i : table.getIndexes()) {
                    if (i.getIndexId().equals(index.getIndexId())) {
                        tableName = table.getName().getTableName();
                        indexName = i.getIndexName().getName();
                        break;
                    }
                }
            }
        }
        return String.format("%s$$%s$$%s$$%s", groupName, schemaName, tableName, indexName);
    }

    private RowDef createUserTableRowDef(AkibanInformationSchema ais, UserTable table) {
        RowDef rowDef = new RowDef(table);
        // parentRowDef
        int[] parentJoinFields;
        if (table.getParentJoin() != null) {
            final Join join = table.getParentJoin();
            // parentJoinFields - TODO - not sure this is right.
            parentJoinFields = new int[join.getJoinColumns().size()];
            for (int index = 0; index < join.getJoinColumns().size(); index++) {
                final JoinColumn joinColumn = join.getJoinColumns().get(index);
                parentJoinFields[index] = joinColumn.getChild().getPosition();
            }
        } else {
            parentJoinFields = new int[0];
        }
        // root table
        UserTable root = table;
        while (root.getParentJoin() != null) {
            root = root.getParentJoin().getParent();
        }
        // group table name
        String groupTableName = null;
        String groupTableTreeName = null;
        for (final GroupTable groupTable : ais.getGroupTables().values()) {
            if (groupTable.getRoot().equals(root)) {
                groupTableName = groupTable.getName().getTableName();
                groupTableTreeName = getTreeName(groupTable);
            }
        }
        assert groupTableName != null : root;
        assert groupTableTreeName != null : root;
        // Secondary indexes
        List<IndexDef> indexDefList = new ArrayList<IndexDef>();
        for (Index index : table.getIndexesIncludingInternal()) {
            List<IndexColumn> indexColumns = index.getColumns();
            if (!indexColumns.isEmpty()) {
                String treeName = getTreeName(groupTableName, index);
                IndexDef indexDef = new IndexDef(treeName, rowDef, index);
                if (index.isPrimaryKey()) {
                    // Primary key always goes first.
                    indexDefList.add(0, indexDef);
                } else {
                    indexDefList.add(indexDef);
                }
            }
            // else: Don't create an index for an artificial IndexDef that has
            // no fields.
        }
        rowDef.setTreeName(groupTableTreeName);
        rowDef.setParentJoinFields(parentJoinFields);
        rowDef.setIndexDefs(indexDefList.toArray(new IndexDef[indexDefList.size()]));
        rowDef.setOrdinal(0);
        return rowDef;
    }

    private RowDef createGroupTableRowDef(AkibanInformationSchema ais, GroupTable table) {
        RowDef rowDef = new RowDef(table);
        List<Integer> userTableRowDefIds = new ArrayList<Integer>();
        for (Column column : table.getColumnsIncludingInternal()) {
            Column userColumn = column.getUserColumn();
            if (userColumn.getPosition() == 0) {
                int userRowDefId = userColumn.getTable().getTableId();
                userTableRowDefIds.add(userRowDefId);
                RowDef userRowDef = cacheMap.get(userRowDefId);
                userRowDef.setColumnOffset(column.getPosition());
            }
        }
        RowDef[] userTableRowDefs = new RowDef[userTableRowDefIds.size()];
        int i = 0;
        for (Integer userTableRowDefId : userTableRowDefIds) {
            userTableRowDefs[i++] = cacheMap.get(userTableRowDefId);
        }
        final String groupTableName = table.getName().getTableName();
        final String groupTableTreeName = getTreeName(table);
        // Secondary indexes
        final List<IndexDef> indexDefList = new ArrayList<IndexDef>();
        for (Index index : table.getIndexes()) {
            List<IndexColumn> indexColumns = index.getColumns();
            if (!indexColumns.isEmpty()) {
                String treeName = getTreeName(groupTableName, index);
                IndexDef indexDef = new IndexDef(treeName, rowDef, index);
                indexDefList.add(indexDef);
            }
            // else: Don't create a group table index for an artificial
            // IndexDef that has no fields.
        }
        rowDef.setTreeName(groupTableTreeName);
        rowDef.setUserTableRowDefs(userTableRowDefs);
        rowDef.setIndexDefs(indexDefList.toArray(new IndexDef[indexDefList.size()]));
        return rowDef;
    }

    RowDef lookUpRowDef(final int rowDefId) throws RowDefNotFoundException {
        throw new RowDefNotFoundException(rowDefId);
    }

    /**
     * Adds a RowDef preemptively to the cache. This is intended primarily to
     * simplify unit tests.
     *
     * @param rowDef
     */
    public synchronized void putRowDef(final RowDef rowDef) {
        final Integer key = rowDef.getRowDefId();
        final String name = nameOf(rowDef.getSchemaName(), rowDef.getTableName());
        if (cacheMap.containsKey(key) || nameMap.containsKey(name)) {
            throw new IllegalStateException("RowDef " + rowDef + " already exists");
        }
        cacheMap.put(key, rowDef);
        nameMap.put(name, key);
    }

    @Override
    public String toString() {
        final StringBuilder sb = new StringBuilder("\n");
        for (Map.Entry<String, Integer> entry : nameMap.entrySet()) {
            final RowDef rowDef = cacheMap.get(entry.getValue());
            sb.append("   ");
            sb.append(rowDef);
            sb.append("\n");
        }
        return sb.toString();
    }

    public void analyzeAll() throws RowDefNotFoundException {
        for (final RowDef rowDef : cacheMap.values()) {
            analyze(rowDef);
        }
    }

    void analyze(final RowDef rowDef) throws RowDefNotFoundException {
        rowDef.computeRowDefType(this);
        rowDef.computeFieldAssociations(this);
    }

    RowDef rowDef(Table table) {
        for (RowDef rowDef : cacheMap.values()) {
            if (rowDef.table() == table) {
                return rowDef;
            }
        }
        return null;
    }

    @Override
    public boolean equals(final Object o) {
        // BUG FIX: the original cast blindly, throwing ClassCastException /
        // NullPointerException for foreign or null arguments instead of
        // returning false (equals contract violation).
        if (this == o) {
            return true;
        }
        if (!(o instanceof RowDefCache)) {
            return false;
        }
        final RowDefCache cache = (RowDefCache) o;
        return cacheMap.equals(cache.cacheMap);
    }

    @Override
    public int hashCode() {
        return hashCode;
    }
}
package com.algolia.search.saas;

import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.util.List;

import org.json.JSONArray;

/**
 * Builder for the parameters of an Algolia search query. Setters return
 * {@code this} so calls can be chained; {@link #getQueryString()} serializes
 * the configured parameters as a URL query string.
 */
public class Query {
    public enum QueryType {
        /// all query words are interpreted as prefixes.
        PREFIX_ALL,
        /// only the last word is interpreted as a prefix (default behavior).
        PREFIX_LAST,
        /// no query word is interpreted as a prefix. This option is not recommended.
        PREFIX_NONE
    }

    protected List<String> attributes;
    protected List<String> attributesToHighlight;
    protected List<String> attributesToSnippet;
    protected int minWordSizeForApprox1;
    protected int minWordSizeForApprox2;
    protected boolean getRankingInfo;
    protected boolean distinct;
    protected boolean advancedSyntax;
    protected int page;
    protected int hitsPerPage;
    protected String restrictSearchableAttributes;
    protected String tags;
    protected String numerics;
    protected String insideBoundingBox;
    protected String aroundLatLong;
    protected boolean aroundLatLongViaIP;
    protected String query;
    protected QueryType queryType;
    protected String optionalWords;
    protected String facets;
    protected String facetsFilter;
    protected int maxNumberOfFacets;
    protected boolean analytics;
    protected boolean synonyms;
    protected boolean replaceSynonyms;
    protected boolean typoTolerance;
    protected boolean allowTyposOnNumericTokens;

    /** Creates a query with the given full-text query string (PREFIX_LAST). */
    public Query(String query) {
        minWordSizeForApprox1 = 3;
        minWordSizeForApprox2 = 7;
        getRankingInfo = false;
        distinct = false;
        page = 0;
        hitsPerPage = 20;
        this.query = query;
        queryType = QueryType.PREFIX_LAST;
        maxNumberOfFacets = -1;
        advancedSyntax = false;
        analytics = synonyms = replaceSynonyms = typoTolerance = allowTyposOnNumericTokens = true;
    }

    /**
     * Creates an empty query.
     * NOTE(review): this constructor defaults to PREFIX_ALL while
     * {@link #Query(String)} defaults to PREFIX_LAST — confirm the asymmetry
     * is intentional.
     */
    public Query() {
        minWordSizeForApprox1 = 3;
        minWordSizeForApprox2 = 7;
        getRankingInfo = false;
        distinct = false;
        page = 0;
        hitsPerPage = 20;
        queryType = QueryType.PREFIX_ALL;
        maxNumberOfFacets = -1;
        advancedSyntax = false;
        analytics = synonyms = replaceSynonyms = typoTolerance = allowTyposOnNumericTokens = true;
    }

    /**
     * List of object attributes you want to use for textual search (must be a subset of the attributesToIndex
     * index setting). Attributes are separated with a comma (for example @"name,address").
     * You can also use a JSON string array encoding (for example encodeURIComponent("[\"name\",\"address\"]")).
     * By default, all attributes specified in attributesToIndex settings are used to search.
     */
    public Query restrictSearchableAttributes(String attributes) {
        this.restrictSearchableAttributes = attributes;
        return this;
    }

    /**
     * Select how the query words are interpreted.
     */
    public Query setQueryType(QueryType type) {
        this.queryType = type;
        return this;
    }

    /**
     * Set the full text query.
     */
    public Query setQueryString(String query) {
        this.query = query;
        return this;
    }

    /**
     * Specify the list of attribute names to retrieve.
     * By default all attributes are retrieved.
     */
    public Query setAttributesToRetrieve(List<String> attributes) {
        this.attributes = attributes;
        return this;
    }

    /**
     * Specify the list of attribute names to highlight.
     * By default indexed attributes are highlighted.
     */
    public Query setAttributesToHighlight(List<String> attributes) {
        this.attributesToHighlight = attributes;
        return this;
    }

    /**
     * Specify the list of attribute names to Snippet alongside the number of words to return
     * (syntax is 'attributeName:nbWords'). By default no snippet is computed.
     */
    public Query setAttributesToSnippet(List<String> attributes) {
        this.attributesToSnippet = attributes;
        return this;
    }

    /**
     * @param distinct if set to true, enable the distinct feature (disabled by default) if the
     * attributeForDistinct index setting is set. This feature is similar to the SQL "distinct" keyword:
     * when enabled in a query with the distinct=1 parameter, all hits containing a duplicate value for
     * the attributeForDistinct attribute are removed from results. For example, if the chosen attribute
     * is show_name and several hits have the same value for show_name, then only the best one is kept
     * and others are removed.
     */
    public Query enableDistinct(boolean distinct) {
        this.distinct = distinct;
        return this;
    }

    /**
     * @param enabled if set to false, this query will not be taken into account in analytics feature.
     * Default to true.
     */
    public Query enableAnalytics(boolean enabled) {
        this.analytics = enabled;
        return this;
    }

    /**
     * @param enabled if set to false, this query will not use synonyms defined in configuration.
     * Default to true.
     */
    public Query enableSynonyms(boolean enabled) {
        this.synonyms = enabled;
        return this;
    }

    /**
     * @param enabled if set to false, words matched via synonyms expansion will not be replaced by the
     * matched synonym in highlight result. Default to true.
     */
    public Query enableReplaceSynonymsInHighlight(boolean enabled) {
        this.replaceSynonyms = enabled;
        return this;
    }

    /**
     * @param enabled if set to false, disable typo-tolerance. Default to true.
     */
    public Query enableTypoTolerance(boolean enabled) {
        this.typoTolerance = enabled;
        return this;
    }

    /**
     * Specify the minimum number of characters in a query word to accept one typo in this word.
     * Defaults to 3.
     */
    public Query setMinWordSizeToAllowOneTypo(int nbChars) {
        minWordSizeForApprox1 = nbChars;
        return this;
    }

    /**
     * Specify the minimum number of characters in a query word to accept two typos in this word.
     * Defaults to 7.
     */
    public Query setMinWordSizeToAllowTwoTypos(int nbChars) {
        minWordSizeForApprox2 = nbChars;
        return this;
    }

    /**
     * @param enabled if set to false, disable typo-tolerance on numeric tokens. Default to true.
     */
    public Query enableTyposOnNumericTokens(boolean enabled) {
        this.allowTyposOnNumericTokens = enabled;
        return this;
    }

    /**
     * If set, the result hits will contain ranking information in _rankingInfo attribute.
     */
    public Query getRankingInfo(boolean enabled) {
        getRankingInfo = enabled;
        return this;
    }

    /**
     * Set the page to retrieve (zero base). Defaults to 0.
     */
    public Query setPage(int page) {
        this.page = page;
        return this;
    }

    /**
     * Set the number of hits per page. Defaults to 20.
     * (Doc fix: previously claimed 10, but both constructors initialize hitsPerPage to 20.)
     */
    public Query setHitsPerPage(int nbHitsPerPage) {
        this.hitsPerPage = nbHitsPerPage;
        return this;
    }

    /**
     * Set the number of hits per page. Defaults to 20.
     * @deprecated Use {@code setHitsPerPage}
     */
    @Deprecated
    public Query setNbHitsPerPage(int nbHitsPerPage) {
        return setHitsPerPage(nbHitsPerPage);
    }

    /**
     * Search for entries around a given latitude/longitude.
     * @param radius set the maximum distance in meters.
     * Note: at indexing, geoloc of an object should be set with _geoloc attribute containing lat and lng
     * attributes (for example {"_geoloc":{"lat":48.853409, "lng":2.348800}})
     */
    public Query aroundLatitudeLongitude(float latitude, float longitude, int radius) {
        aroundLatLong = "aroundLatLng=" + latitude + "," + longitude + "&aroundRadius=" + radius;
        return this;
    }

    /**
     * Search for entries around a given latitude/longitude.
     * @param radius set the maximum distance in meters.
     * @param precision set the precision for ranking (for example if you set precision=100, two objects
     * that are distant of less than 100m will be considered as identical for "geo" ranking parameter).
     * Note: at indexing, geoloc of an object should be set with _geoloc attribute containing lat and lng
     * attributes (for example {"_geoloc":{"lat":48.853409, "lng":2.348800}})
     */
    public Query aroundLatitudeLongitude(float latitude, float longitude, int radius, int precision) {
        aroundLatLong = "aroundLatLng=" + latitude + "," + longitude + "&aroundRadius=" + radius + "&aroundPrecision=" + precision;
        return this;
    }

    /**
     * Search for entries around the latitude/longitude of user (using IP geolocation)
     * @param radius set the maximum distance in meters.
     * Note: at indexing, geoloc of an object should be set with _geoloc attribute containing lat and lng
     * attributes (for example {"_geoloc":{"lat":48.853409, "lng":2.348800}})
     */
    public Query aroundLatitudeLongitudeViaIP(boolean enabled, int radius) {
        aroundLatLong = "aroundRadius=" + radius;
        aroundLatLongViaIP = enabled;
        return this;
    }

    /**
     * Search for entries around the latitude/longitude of user (using IP geolocation)
     * @param radius set the maximum distance in meters.
     * @param precision set the precision for ranking (for example if you set precision=100, two objects
     * that are distant of less than 100m will be considered as identical for "geo" ranking parameter).
     * Note: at indexing, geoloc of an object should be set with _geoloc attribute containing lat and lng
     * attributes (for example {"_geoloc":{"lat":48.853409, "lng":2.348800}})
     */
    public Query aroundLatitudeLongitudeViaIP(boolean enabled, int radius, int precision) {
        aroundLatLong = "aroundRadius=" + radius + "&aroundPrecision=" + precision;
        aroundLatLongViaIP = enabled;
        return this;
    }

    /**
     * Search for entries inside a given area defined by the two extreme points of a rectangle.
     * At indexing, geoloc of an object should be set with _geoloc attribute containing lat and lng
     * attributes (for example {"_geoloc":{"lat":48.853409, "lng":2.348800}})
     */
    public Query insideBoundingBox(float latitudeP1, float longitudeP1, float latitudeP2, float longitudeP2) {
        insideBoundingBox = "insideBoundingBox=" + latitudeP1 + "," + longitudeP1 + "," + latitudeP2 + "," + longitudeP2;
        return this;
    }

    /**
     * Set the list of words that should be considered as optional when found in the query.
     * @param words The list of optional words, comma separated.
     */
    public Query setOptionalWords(String words) {
        this.optionalWords = words;
        return this;
    }

    /**
     * Set the list of words that should be considered as optional when found in the query.
     * Bug fix: the previous implementation appended a separator after every word, producing a
     * trailing comma ("a,b,"); now joins like {@link #setNumericFilters(List)}.
     * @param words The list of optional words.
     */
    public Query setOptionalWords(List<String> words) {
        StringBuilder builder = new StringBuilder();
        boolean first = true;
        for (String word : words) {
            if (!first)
                builder.append(',');
            builder.append(word);
            first = false;
        }
        this.optionalWords = builder.toString();
        return this;
    }

    /**
     * Filter the query by a list of facets (JSON-array encoded).
     */
    public Query setFacetFilters(List<String> facets) {
        JSONArray obj = new JSONArray();
        for (String facet : facets) {
            obj.put(facet);
        }
        this.facetsFilter = obj.toString();
        return this;
    }

    /**
     * Filter the query by a facet filter expression.
     */
    public Query setFacetFilters(String facets) {
        facetsFilter = facets;
        return this;
    }

    /**
     * List of object attributes that you want to use for faceting. <br/>
     * Only attributes that have been added in **attributesForFaceting** index setting can be used in
     * this parameter. You can also use `*` to perform faceting on all attributes specified in
     * **attributesForFaceting**.
     */
    public Query setFacets(List<String> facets) {
        JSONArray obj = new JSONArray();
        for (String facet : facets) {
            obj.put(facet);
        }
        this.facets = obj.toString();
        return this;
    }

    /**
     * Limit the number of facet values returned for each facet.
     */
    public Query setMaxNumberOfFacets(int n) {
        this.maxNumberOfFacets = n;
        return this;
    }

    /**
     * Filter the query by a set of tags. You can AND tags by separating them by commas. To OR tags,
     * you must add parentheses. For example tag1,(tag2,tag3) means tag1 AND (tag2 OR tag3).
     * At indexing, tags should be added in the _tags attribute of objects
     * (for example {"_tags":["tag1","tag2"]} )
     */
    public Query setTagFilters(String tags) {
        this.tags = tags;
        return this;
    }

    /**
     * Add a list of numeric filters separated by a comma.
     * The syntax of one filter is `attributeName` followed by `operand` followed by `value`.
     * Supported operands are `<`, `<=`, `=`, `>` and `>=`.
     * You can have multiple conditions on one attribute like for example `numerics=price>100,price<1000`.
     */
    public Query setNumericFilters(String numerics) {
        this.numerics = numerics;
        return this;
    }

    /**
     * Add a list of numeric filters separated by a comma.
     * The syntax of one filter is `attributeName` followed by `operand` followed by `value`.
     * Supported operands are `<`, `<=`, `=`, `>` and `>=`.
     * You can have multiple conditions on one attribute like for example `numerics=price>100,price<1000`.
     */
    public Query setNumericFilters(List<String> numerics) {
        StringBuilder builder = new StringBuilder();
        boolean first = true;
        for (String n : numerics) {
            if (!first)
                builder.append(",");
            builder.append(n);
            first = false;
        }
        this.numerics = builder.toString();
        return this;
    }

    /**
     * Enable the advanced query syntax. Defaults to false.
     * - Phrase query: a phrase query defines a particular sequence of terms.
     *   A phrase query is build by Algolia's query parser for words surrounded by ".
     *   For example, "search engine" will retrieve records having search next to engine only.
     *   Typo-tolerance is disabled on phrase queries.
     * - Prohibit operator: The prohibit operator excludes records that contain the term after the - symbol.
     *   For example search -engine will retrieve records containing search but not engine.
     * NOTE(review): method name has a typo ("Avanced") but is part of the public API, so it is kept.
     */
    public Query enableAvancedSyntax(boolean advancedSyntax) {
        this.advancedSyntax = advancedSyntax;
        return this;
    }

    /** Appends the '&' parameter separator unless the builder is still empty. */
    private static void appendSeparator(StringBuilder sb) {
        if (sb.length() > 0)
            sb.append('&');
    }

    /** Appends {@code name=v1,v2,...} with each value URL-encoded. */
    private static void appendEncodedList(StringBuilder sb, String name, List<String> values)
            throws UnsupportedEncodingException {
        appendSeparator(sb);
        sb.append(name).append('=');
        boolean first = true;
        for (String value : values) {
            if (!first)
                sb.append(',');
            sb.append(URLEncoder.encode(value, "UTF-8"));
            first = false;
        }
    }

    /**
     * Serializes the configured parameters as a URL query string.
     * Only parameters that differ from their server-side defaults are emitted.
     * (Refactor: the repeated separator guard and list-joining loops were
     * extracted into appendSeparator/appendEncodedList; output is unchanged.)
     */
    protected String getQueryString() {
        StringBuilder sb = new StringBuilder();
        try {
            if (attributes != null) {
                appendEncodedList(sb, "attributes", attributes);
            }
            if (attributesToHighlight != null) {
                appendEncodedList(sb, "attributesToHighlight", attributesToHighlight);
            }
            if (attributesToSnippet != null) {
                appendEncodedList(sb, "attributesToSnippet", attributesToSnippet);
            }
            if (!typoTolerance) {
                appendSeparator(sb);
                sb.append("typoTolerance=false");
            }
            if (!allowTyposOnNumericTokens) {
                appendSeparator(sb);
                sb.append("allowTyposOnNumericTokens=false");
            }
            if (minWordSizeForApprox1 != 3) {
                appendSeparator(sb);
                sb.append("minWordSizefor1Typo=").append(minWordSizeForApprox1);
            }
            if (minWordSizeForApprox2 != 7) {
                appendSeparator(sb);
                sb.append("minWordSizefor2Typos=").append(minWordSizeForApprox2);
            }
            if (getRankingInfo) {
                appendSeparator(sb);
                sb.append("getRankingInfo=1");
            }
            if (!analytics) {
                appendSeparator(sb);
                sb.append("analytics=0");
            }
            if (!synonyms) {
                appendSeparator(sb);
                sb.append("synonyms=0");
            }
            if (!replaceSynonyms) {
                appendSeparator(sb);
                sb.append("replaceSynonymsInHighlight=0");
            }
            if (distinct) {
                appendSeparator(sb);
                sb.append("distinct=1");
            }
            if (advancedSyntax) {
                appendSeparator(sb);
                sb.append("advancedSyntax=1");
            }
            if (page > 0) {
                appendSeparator(sb);
                sb.append("page=").append(page);
            }
            if (hitsPerPage != 20 && hitsPerPage > 0) {
                appendSeparator(sb);
                sb.append("hitsPerPage=").append(hitsPerPage);
            }
            if (tags != null) {
                appendSeparator(sb);
                sb.append("tagFilters=").append(URLEncoder.encode(tags, "UTF-8"));
            }
            if (numerics != null) {
                appendSeparator(sb);
                sb.append("numericFilters=").append(URLEncoder.encode(numerics, "UTF-8"));
            }
            // insideBoundingBox and aroundLatLong are mutually exclusive;
            // the bounding box wins (matches original precedence).
            if (insideBoundingBox != null) {
                appendSeparator(sb);
                sb.append(insideBoundingBox);
            } else if (aroundLatLong != null) {
                appendSeparator(sb);
                sb.append(aroundLatLong);
            }
            if (aroundLatLongViaIP) {
                appendSeparator(sb);
                sb.append("aroundLatLngViaIP=true");
            }
            if (query != null) {
                appendSeparator(sb);
                sb.append("query=").append(URLEncoder.encode(query, "UTF-8"));
            }
            if (facets != null) {
                appendSeparator(sb);
                sb.append("facets=").append(URLEncoder.encode(facets, "UTF-8"));
            }
            if (facetsFilter != null) {
                appendSeparator(sb);
                sb.append("facetFilters=").append(URLEncoder.encode(facetsFilter, "UTF-8"));
            }
            if (maxNumberOfFacets > 0) {
                appendSeparator(sb);
                sb.append("maxNumberOfFacets=").append(maxNumberOfFacets);
            }
            if (optionalWords != null) {
                appendSeparator(sb);
                sb.append("optionalWords=").append(URLEncoder.encode(optionalWords, "UTF-8"));
            }
            if (restrictSearchableAttributes != null) {
                appendSeparator(sb);
                sb.append("restrictSearchableAttributes=")
                  .append(URLEncoder.encode(restrictSearchableAttributes, "UTF-8"));
            }
            switch (queryType) {
            case PREFIX_ALL:
                appendSeparator(sb);
                sb.append("queryType=prefixAll");
                break;
            case PREFIX_LAST:
                // server-side default: nothing emitted
                break;
            case PREFIX_NONE:
                appendSeparator(sb);
                sb.append("queryType=prefixNone");
                break;
            }
        } catch (UnsupportedEncodingException e) {
            // UTF-8 is guaranteed by the JVM spec; this cannot happen in practice.
            throw new RuntimeException(e);
        }
        return sb.toString();
    }
}
package com.ezardlabs.lostsector;

import com.ezardlabs.dethsquare.Animator;
import com.ezardlabs.dethsquare.Collider;
import com.ezardlabs.dethsquare.GameObject;
import com.ezardlabs.dethsquare.LevelManager;
import com.ezardlabs.dethsquare.Renderer;
import com.ezardlabs.dethsquare.Rigidbody;
import com.ezardlabs.dethsquare.TextureAtlas;
import com.ezardlabs.dethsquare.multiplayer.NetworkAnimator;
import com.ezardlabs.dethsquare.multiplayer.NetworkRenderer;
import com.ezardlabs.dethsquare.multiplayer.NetworkTransform;
import com.ezardlabs.dethsquare.prefabs.PrefabManager;
import com.ezardlabs.dethsquare.util.BaseGame;
import com.ezardlabs.lostsector.levels.ExploreLevel;
import com.ezardlabs.lostsector.levels.MainMenuLevel;
import com.ezardlabs.lostsector.levels.MultiplayerLevel;
import com.ezardlabs.lostsector.levels.MultiplayerLobbyLevel;
import com.ezardlabs.lostsector.levels.ProceduralLevel;
import com.ezardlabs.lostsector.levels.SurvivalLevel;
import com.ezardlabs.lostsector.objects.Player;
import com.ezardlabs.lostsector.objects.enemies.corpus.crewmen.DeraCrewman;
import com.ezardlabs.lostsector.objects.enemies.corpus.crewmen.ProvaCrewman;
import com.ezardlabs.lostsector.objects.enemies.corpus.crewmen.SupraCrewman;
import com.ezardlabs.lostsector.objects.environment.Door;
import com.ezardlabs.lostsector.objects.environment.LaserDoor;
import com.ezardlabs.lostsector.objects.environment.Locker;
import com.ezardlabs.lostsector.objects.hud.HUD;
import com.ezardlabs.lostsector.objects.projectiles.LankaBeam;
import com.ezardlabs.lostsector.objects.warframes.Frost;

/**
 * Game entry point: registers every level and prefab with the engine, then
 * loads the main menu. Each {@code addPrefab} call supplies one factory for
 * the locally-owned instance and, where a second lambda is given, another for
 * the remote/network replica (a lighter GameObject without local-only
 * components such as Rigidbody, Player or HUD).
 */
public class Game extends BaseGame {
	// NOTE(review): public mutable static array; nothing in this file assigns
	// or reads it — presumably populated by multiplayer code elsewhere.
	public static GameObject[] players;

	/** Damage categories used by combat code elsewhere in the project. */
	public enum DamageType {
		NORMAL,
		SLASH,
		COLD,
		KUBROW
	}

	@Override
	public void create() {
		// Register all levels before anything tries to load one.
		LevelManager.registerLevel("explore", new ExploreLevel());
		LevelManager.registerLevel("survival", new SurvivalLevel());
		LevelManager.registerLevel("procedural", new ProceduralLevel());
		LevelManager.registerLevel("multiplayer_lobby", new MultiplayerLobbyLevel());
		LevelManager.registerLevel("multiplayer", new MultiplayerLevel());
		LevelManager.registerLevel("main_menu", new MainMenuLevel());

		// Register prefab factories so levels can instantiate objects by name.
		registerPlayerPrefabs();
		registerProjectilePrefabs();
		registerDoorPrefabs();
		registerLockerPrefabs();
		registerEnemyPrefabs();

		LevelManager.loadLevel("main_menu");
	}

	/** Local player (with Player/HUD/Rigidbody) and remote "Other Player" variants. */
	private void registerPlayerPrefabs() {
		PrefabManager.addPrefab("player",
				() -> new GameObject("Player", "player", new Player(), new HUD(), new Renderer(),
						new Animator(), new Frost(), new Collider(200, 200), new Rigidbody(),
						new NetworkTransform(), new NetworkRenderer(), new NetworkAnimator()),
				() -> new GameObject("Other Player", "player", new Renderer(), new Animator(),
						new Frost(), new Collider(200, 200), new NetworkTransform(),
						new NetworkRenderer(), new NetworkAnimator()));
	}

	/** Lanka beam projectile; the remote variant has no LankaBeam behaviour. */
	private void registerProjectilePrefabs() {
		PrefabManager.addPrefab("lanka_beam",
				() -> new GameObject("Lanka Beam", new Renderer("images/blue.png", 0, 0),
						new LankaBeam(500), new NetworkTransform(), new NetworkRenderer()),
				() -> new GameObject("Lanka Beam", new Renderer("images/blue.png", 0, 0),
						new NetworkTransform(), new NetworkRenderer()));
	}

	/** Standard and laser doors; both use the shared environment texture atlas. */
	private void registerDoorPrefabs() {
		PrefabManager.addPrefab("door",
				() -> new GameObject("Door", true, new Door(
						new TextureAtlas("images/environment/atlas.png",
								"images/environment/atlas.txt")), new Renderer(), new Animator(),
						new Collider(100, 500, true)));
		PrefabManager.addPrefab("laser_door",
				() -> new GameObject("Laser Door", true,
						new LaserDoor(new TextureAtlas("images/environment/atlas.png",
								"images/environment/atlas.txt")), new Renderer(), new Animator(),
						new Collider(100, 500, true)));
	}

	/** Locked and unlocked lockers; only the unlocked kind gets a remote variant. */
	private void registerLockerPrefabs() {
		PrefabManager.addPrefab("locker_locked",
				() -> new GameObject("Locker", true, new Renderer(),
						new Locker(true, new TextureAtlas("images/environment/atlas.png",
								"images/environment/atlas.txt"))));
		PrefabManager.addPrefab("locker_unlocked",
				() -> new GameObject("Locker", true, new Renderer(), new Locker(false,
						new TextureAtlas("images/environment/atlas.png",
								"images/environment/atlas.txt")), new Collider(100, 200, true),
						new Animator(), new NetworkAnimator()),
				() -> new GameObject("Locker", true, new Renderer(), new Locker(false,
						new TextureAtlas("images/environment/atlas.png",
								"images/environment/atlas.txt")), new Animator(),
						new NetworkAnimator()));
	}

	/** Corpus crewman enemies; remote variants drop the Rigidbody and AI component. */
	private void registerEnemyPrefabs() {
		PrefabManager.addPrefab("dera_crewman",
				() -> new GameObject("Dera Crewman", new Renderer(), new Animator(),
						new Collider(200, 200), new Rigidbody(), new DeraCrewman(),
						new NetworkTransform(), new NetworkRenderer(), new NetworkAnimator()),
				() -> new GameObject("Dera Crewman", new Renderer(), new Animator(),
						new Collider(200, 200), new NetworkTransform(), new NetworkRenderer(),
						new NetworkAnimator()));
		PrefabManager.addPrefab("prova_crewman",
				() -> new GameObject("Prova Crewman", new Renderer(), new Animator(),
						new Collider(200, 200), new Rigidbody(), new ProvaCrewman(),
						new NetworkTransform(), new NetworkRenderer(), new NetworkAnimator()),
				() -> new GameObject("Prova Crewman", new Renderer(), new Animator(),
						new Collider(200, 200), new NetworkTransform(), new NetworkRenderer(),
						new NetworkAnimator()));
		PrefabManager.addPrefab("supra_crewman",
				() -> new GameObject("Supra Crewman", new Renderer(), new Animator(),
						new Collider(200, 200), new Rigidbody(), new SupraCrewman(),
						new NetworkTransform(), new NetworkRenderer(), new NetworkAnimator()),
				() -> new GameObject("Supra Crewman", new Renderer(), new Animator(),
						new Collider(200, 200), new NetworkTransform(), new NetworkRenderer(),
						new NetworkAnimator()));
	}
}
package com.fishercoder.solutions; public class _29 { public static class Solution1 { public int divide(int dividend, int divisor) { if (divisor == 0 || (dividend == Integer.MIN_VALUE && divisor == -1)) { return Integer.MAX_VALUE; } if (dividend != Integer.MIN_VALUE && Math.abs(dividend) < Math.abs(divisor)) { return 0; } if (divisor == Integer.MIN_VALUE) { return (dividend == Integer.MIN_VALUE) ? 1 : 0; } boolean flag = (dividend < 0) ^ (divisor < 0); dividend = -Math.abs(dividend); divisor = -Math.abs(divisor); int[] num = new int[40]; int[] multiple = new int[40]; num[1] = divisor; multiple[1] = 1; for (int i = 2; i < 32 && num[i - 1] < 0; ++i) { num[i] = num[i - 1] << 1; multiple[i] = multiple[i - 1] << 1; } int result = 0; int index = 1; while (num[index] < 0) { ++index; } index -= 1; while (dividend <= divisor) { while (dividend <= num[index]) { result += multiple[index]; dividend -= num[index]; } --index; } return !flag ? result : -result; } } }
package ljdp.minechem.api.core;

import static ljdp.minechem.api.core.EnumElement.*;

import java.util.ArrayList;
import java.util.Random;

// import ljdp.minechem.api.recipe.DecomposerRecipe;
// import ljdp.minechem.api.recipe.SynthesisRecipe;

// Na2OLi2O(SiO2)2(B2O3)3H2O
// MOLECULE IDS MUST BE CONTINUOUS OTHERWISE THE ARRAY WILL BE MISALIGNED.
/**
 * Registry of every molecule the mod knows about. Each constant carries a
 * numeric id, a display name, two RGB colour triples (primary/secondary) and
 * its chemical composition as a list of Elements and sub-Molecules.
 * Constants declared without colours get random placeholder colours via the
 * deprecated constructor until proper colours are assigned.
 */
public enum EnumMolecule {
    cellulose(0, "Cellulose", 0, 1, 0, 0, 0.25F, 0, new Element(C, 6), new Element(H, 10), new Element(O, 5)),
    water(1, "Water", 0, 0, 1, 0, 0, 1, new Element(H, 2), new Element(O)),
    carbonDioxide(2, "Carbon Dioxide", 0.5F, 0.5F, 0.5F, 0.25F, 0.25F, 0.25F, new Element(C), new Element(O, 2)),
    nitrogenDioxide(3, "Nitrogen Dioxide", 1, 0.65F, 0, 0.5F, 0.1412F, 0.1843F, new Element(N), new Element(O, 2)),
    toluene(4, "Toluene", 1, 1, 1, 0.8F, 0.8F, 0.8F, new Element(C, 7), new Element(H, 8)),
    potassiumNitrate(5, "Potassium Nitrate", 0.9F, 0.9F, 0.9F, 0.8F, 0.8F, 0.8F, new Element(K), new Element(N), new Element(O, 3)),
    tnt(6, "Trinitrotoluene", 1, 1, 0, 1, 0.65F, 0, new Element(C, 6), new Element(H, 2), new Molecule(nitrogenDioxide, 3), new Molecule(toluene)),
    siliconDioxide(7, "Silicon Dioxide", 1, 1, 1, 1, 1, 1, new Element(Si), new Element(O, 2)),
    calcite(8, "Calcite", new Element(Ca), new Element(C), new Element(O, 3)),
    pyrite(9, "Pyrite", new Element(Fe), new Element(S, 2)),
    nepheline(10, "Nepheline", new Element(Al), new Element(Si), new Element(O, 4)),
    sulfate(11, "Sulfate (ion)", new Element(S), new Element(O, 4)),
    noselite(12, "Noselite", new Element(Na, 8), new Molecule(nepheline, 6), new Molecule(sulfate)),
    sodalite(13, "Sodalite", new Element(Na, 8), new Molecule(nepheline, 6), new Element(Cl, 2)),
    nitrate(14, "Nitrate (ion)", new Element(N), new Element(O, 3)),
    carbonate(15, "Carbonate (ion)", new Element(C), new Element(O, 3)),
    cyanide(16, "Potassium Cyanide", new Element(K), new Element(C), new Element(N)),
    phosphate(17, "Phosphate (ion)", new Element(P), new Element(O, 4)),
    acetate(18, "Acetate (ion)", new Element(C, 2), new Element(H, 3), new Element(O, 2)),
    chromate(19, "Chromate (ion)", new Element(Cr), new Element(O, 4)),
    hydroxide(20, "Hydroxide (ion)", new Element(O), new Element(H)),
    ammonium(21, "Ammonium (ion)", new Element(N), new Element(H, 4)),
    hydronium(22, "Hydronium (ion)", new Element(H, 3), new Element(O)),
    peroxide(23, "Hydrogen Peroxide", new Element(H, 2), new Element(O, 2)),
    calciumOxide(24, "Calcium Oxide", new Element(Ca), new Element(O)),
    calciumCarbonate(25, "Calcium Carbonate", new Element(Ca), new Molecule(carbonate)),
    magnesiumCarbonate(26, "Magnesium Carbonate", new Element(Mg), new Molecule(carbonate)),
    lazurite(27, "Lazurite", new Element(Na, 8), new Molecule(nepheline), new Molecule(sulfate)),
    isoprene(28, "Isoprene", new Element(C, 5), new Element(H, 8)),
    butene(29, "Butene", new Element(C, 4), new Element(H, 8)),
    polyisobutylene(30, "Polyisobutylene Rubber", new Molecule(butene, 16), new Molecule(isoprene)),
    malicAcid(31, "Malic Acid", new Element(C, 4), new Element(H, 6), new Element(O, 5)),
    vinylChloride(32, "Vinyl Chloride Monomer", new Element(C, 2), new Element(H, 3), new Element(Cl)),
    polyvinylChloride(33, "Polyvinyl Chloride", new Molecule(vinylChloride, 64)),
    methamphetamine(34, "Methamphetamine", new Element(C, 10), new Element(H, 15), new Element(N)),
    psilocybin(35, "Psilocybin", new Element(C, 12), new Element(H, 17), new Element(N, 2), new Element(O, 4), new Element(P)),
    iron3oxide(36, "Iron (III) Oxide", new Element(Fe, 2), new Element(O, 3)),
    strontiumNitrate(37, "Strontium Nitrate", new Element(Sr), new Molecule(nitrate, 2)),
    magnetite(38, "Magnetite", new Element(Fe, 3), new Element(O, 4)),
    magnesiumOxide(39, "Magnesium Oxide", new Element(Mg), new Element(O)),
    cucurbitacin(40, "Cucurbitacin", new Element(C, 30), new Element(H, 42), new Element(O, 7)),
    asparticAcid(41, "Aspartic Acid", new Element(C, 4), new Element(H, 7), new Element(N), new Element(O, 4)),
    hydroxylapatite(42, "Hydroxylapatite", new Element(Ca, 5), new Molecule(phosphate, 3), new Element(O), new Element(H)),
    alinine(43, "Alinine (amino acid)", new Element(C, 3), new Element(H, 7), new Element(N), new Element(O, 2)),
    glycine(44, "Glycine (amino acid)", new Element(C, 2), new Element(H, 5), new Element(N), new Element(O, 2)),
    serine(45, "Serine (amino acid)", new Element(C, 3), new Element(H, 7), new Molecule(nitrate)),
    mescaline(46, "Mescaline", new Element(C, 11), new Element(H, 17), new Molecule(nitrate)),
    methyl(47, "Methyl (ion)", new Element(C), new Element(H, 3)),
    methylene(48, "Methylene (ion)", new Element(C), new Element(H, 2)),
    cyanoacrylate(49, "Cyanoacrylate", new Molecule(methyl), new Molecule(methylene), new Element(C, 3), new Element(N), new Element(H), new Element(O, 2)),
    polycyanoacrylate(50, "Poly-cyanoacrylate", new Molecule(cyanoacrylate, 3)),
    redPigment(51, "Cobalt(II) nitrate", new Element(Co), new Molecule(nitrate, 2)),
    orangePigment(52, "Potassium Dichromate", new Element(K, 2), new Element(Cr, 2), new Element(O, 7)),
    yellowPigment(53, "Potassium Chromate", new Element(Cr), new Element(K, 2), new Element(O, 4)),
    limePigment(54, "Nickel(II) Chloride", new Element(Ni), new Element(Cl, 2)),
    lightbluePigment(55, "Copper(II) Sulfate", new Element(Cu), new Molecule(sulfate)),
    purplePigment(56, "Potassium Permanganate", new Element(K), new Element(Mn), new Element(O, 4)),
    greenPigment(57, "Zinc Green", new Element(Co), new Element(Zn), new Element(O, 2)),
    blackPigment(58, "Carbon Black", new Element(C), new Element(H, 2), new Element(O)),
    whitePigment(59, "Titanium Dioxide", new Element(Ti), new Element(O, 2)),
    metasilicate(60, "Metasilicate", new Element(Si), new Element(O, 3)),
    beryl(61, "Beryl", new Element(Be, 3), new Element(Al, 2), new Molecule(metasilicate, 6)),
    ethanol(62, "Ethyl Alcohol", new Element(C, 2), new Element(H, 6), new Element(O)),
    amphetamine(63, "Amphetamine", new Element(C, 9), new Element(H, 13), new Element(N)),
    theobromine(64, "Theobromine", new Element(C, 7), new Element(H, 8), new Element(N, 4), new Element(O, 2)),
    starch(65, "Starch", new Molecule(cellulose, 2), new Molecule(cellulose, 1)),
    sucrose(66, "Sucrose", new Element(C, 12), new Element(H, 22), new Element(O, 11)),
    // Muscimol (pantherine) is the main active chemical here, not muscarine;
    // it is similar to benzodiazepines.
    pantherine(67, "Pantherine", new Element(C, 4), new Element(H, 6), new Element(N, 2), new Element(O, 2)),
    aluminiumOxide(68, "Aluminium Oxide", new Element(Al, 2), new Element(O, 3)),
    fullrene(69, "Carbon Nanotubes", new Element(C, 64), new Element(C, 64), new Element(C, 64), new Element(C, 64)),
    keratin(70, "Keratin", new Element(C, 2), new Molecule(water), new Element(N)),
    penicillin(71, "Penicillin", new Element(C, 16), new Element(H, 18), new Element(N, 2), new Element(O, 4), new Element(S)),
    testosterone(72, "Testosterone", new Element(C, 19), new Element(H, 28), new Element(O, 2)),
    kaolinite(73, "Kaolinite", new Element(Al, 2), new Element(Si, 2), new Element(O, 5), new Molecule(hydroxide, 4)),
    // Named fingolimod rather than myriocin.
    fingolimod(74, "Fingolimod", new Element(C, 19), new Element(H, 33), new Molecule(nitrogenDioxide)),
    arginine(75, "Arginine (amino acid)", new Element(C, 6), new Element(H, 14), new Element(N, 4), new Element(O, 2)),
    shikimicAcid(76, "Shikimic Acid", new Element(C, 7), new Element(H, 10), new Element(O, 5)),
    sulfuricAcid(77, "Sulfuric Acid", new Element(H, 2), new Element(S), new Element(O, 4)),
    glyphosate(78, "Glyphosate", new Element(C, 3), new Element(H, 8), new Element(N), new Element(O, 5), new Element(P)),
    asprin(79, "Aspirin", new Element(C, 9), new Element(H, 8), new Element(O, 4)),
    ddt(80, "DDT", new Element(C, 14), new Element(H, 9), new Element(Cl, 5)),
    dota(81, "DOTA", new Element(C, 16), new Element(H, 28), new Element(N, 4), new Element(O, 8)),
    poison(82, "T-2 Mycotoxin", new Element(C, 24), new Element(H, 34), new Element(O, 9)),
    salt(83, "Salt", new Element(Na, 1), new Element(Cl, 1)),
    nhthree(84, "Ammonia", new Element(N, 1), new Element(H, 3)),
    nod(85, "Nodularin", new Element(C, 41), new Element(H, 60), new Element(N, 8), new Element(O, 10)),
    ttx(86, "TTX (Tetrodotoxin)", new Element(C, 11), new Element(H, 11), new Element(N, 3), new Element(O, 8)),
    afroman(87, "THC", new Element(C, 21), new Element(H, 30), new Element(O, 2)),
    mt(88, "Methylcyclopentadienyl Manganese Tricarbonyl", new Element(C, 9), new Element(H, 7), new Element(Mn, 1), new Element(O, 3)),
    // Level 1
    buli(89, "Tert-Butyllithium", new Element(Li, 1), new Element(C, 4), new Element(H, 9)),
    // Level 2
    plat(90, "Chloroplatinic acid", new Element(H, 2), new Element(Pt, 1), new Element(Cl, 6)),
    // Level 3
    phosgene(91, "Phosgene", new Element(C, 1), new Element(O, 1), new Element(Cl, 2)),
    aalc(92, "Allyl alcohol", new Element(C, 3), new Element(H, 6), new Element(O, 1)),
    hist(93, "Diphenhydramine", new Element(C, 17), new Element(H, 21), new Element(N), new Element(O)),
    pal2(94, "Batrachotoxin", new Element(C, 31), new Element(H, 42), new Element(N, 2), new Element(O, 6)),
    ret(95, "Retinol", new Element(C, 20), new Element(H, 30), new Element(O)),
    stevenk(96, "Xylitol", new Element(C, 5), new Element(H, 12), new Element(O, 5)),
    weedex(97, "Aminocyclopyrachlor", new Element(C,8), new Element(H,8), new Element(Cl), new Element(N,3), new Element(O,2)),
    biocide(98, "Ptaquiloside", new Element(C, 20), new Element(H, 30), new Element(O, 8)),
    xanax(99, "Alprazolam", new Element(C,17), new Element(H,13), new Element(Cl), new Element(N,4)),
    hcl(100, "Hydrogen Chloride", new Element(H), new Element(Cl)),
    redrocks(101, "Cocaine", new Element(C,17), new Element(H,21), new Element(N), new Element(O,4)),
    coke(102, "Cocaine Hydrochloride", new Molecule(redrocks), new Molecule(hcl)),
    blueorgodye(103, "1,4-dimethyl-7-isopropylazulene (Guaiazulene)", new Element(C,15), new Element(H,18)),
    redorgodye(104, "Pelargonidin", new Element(C,15), new Element(H,11), new Element(O,11)),
    purpleorgodye(105, "Delphinidin", new Element(C,15), new Element(H,11), new Element(O,7)),
    olivine(106, "Olivine", new Element(Fe,2), new Element(Si), new Element(O,4)),
    metblue(107, "Methylene Blue", new Element(C,16), new Element(H,18), new Element(N,3), new Element(S), new Element(Cl)),
    meoh(108, "Methyl Alcohol", new Molecule(methyl), new Molecule(hydroxide)),
    nicotine(109, "Nicotine", new Element(C,10), new Element(H,14), new Element(N,2));

    // NOTE(review): exposed mutable array; callers could reorder/overwrite it.
    public static EnumMolecule[] molecules = values();

    private final String descriptiveName;
    private final ArrayList<Chemical> components;
    private int id;
    // Primary display colour (RGB, 0..1).
    public float red;
    public float green;
    public float blue;
    // Secondary display colour (RGB, 0..1).
    public float red2;
    public float green2;
    public float blue2;

    /**
     * @param id              stable numeric id used for lookup/serialization
     * @param descriptiveName human-readable display name
     * @param colorRed..colorBlue2 primary and secondary RGB components
     * @param chemicals       composition (Elements and/or sub-Molecules)
     */
    EnumMolecule(int id, String descriptiveName, float colorRed, float colorGreen, float colorBlue,
                 float colorRed2, float colorGreen2, float colorBlue2, Chemical... chemicals) {
        this.id = id;
        this.components = new ArrayList<Chemical>();
        this.descriptiveName = descriptiveName;
        for (Chemical chemical : chemicals) {
            this.components.add(chemical);
        }
        // Fix: removed an unused "Random random = new Random(id)" local that
        // was created here but never read.
        this.red = colorRed;
        this.green = colorGreen;
        this.blue = colorBlue;
        this.red2 = colorRed2;
        this.green2 = colorGreen2;
        this.blue2 = colorBlue2;
    }

    @Deprecated
    EnumMolecule(int id, String descriptiveName, Chemical... chemicals) {
        this(id, descriptiveName, getRandomColor(), getRandomColor(), getRandomColor(),
                getRandomColor(), getRandomColor(), getRandomColor(), chemicals);
        // Your molecule will have random colors until you give it a proper color code.
    }

    // Unseeded, so placeholder colours differ between runs by design.
    private static float getRandomColor() {
        Random random = new Random();
        return random.nextFloat();
    }

    /** Linear search by numeric id; returns null when the id is unknown. */
    public static EnumMolecule getById(int id) {
        for (EnumMolecule molecule : molecules) {
            if (molecule.id == id)
                return molecule;
        }
        return null;
    }

    public int id() {
        return this.id;
    }

    public String descriptiveName() {
        return this.descriptiveName;
    }

    public ArrayList<Chemical> components() {
        return this.components;
    }
}
package com.flipstudio.pluma; import java.io.File; import java.util.List; import java.util.Map; import static com.flipstudio.pluma.Pluma.SQLITE_DONE; import static com.flipstudio.pluma.Pluma.SQLITE_MISUSE; import static com.flipstudio.pluma.Pluma.SQLITE_OK; import static com.flipstudio.pluma.Pluma.SQLITE_OPEN_CREATE; import static com.flipstudio.pluma.Pluma.SQLITE_OPEN_READWRITE; import static java.util.Arrays.asList; public final class Database { //region Fields private final String mPath; private String mTempDir; private long mDB; private DatabaseListener mDatabaseListener; //endregion //region Static static { System.loadLibrary("pluma"); } //endregion //region Constructors public Database(String path) { mPath = path; mTempDir = new File(path).getParent(); } //endregion //region Native private native long open(String filePath, int flags, int[] ppOpenCode, String[] ppOpenError); private native long prepare(long db, String sql, int[] ppPrepareCode); private native int exec(long db, String sql, String[] ppOutError); private native int close(long db); private native long lastInsertId(long db); private native String lastErrorMessage(long db); private native void setTempDir(String tempDir); //endregion //region Public public void open() throws SQLiteException { open(SQLITE_OPEN_CREATE | SQLITE_OPEN_READWRITE); } public void open(int flags) throws SQLiteException { int[] codes = new int[1]; String[] errors = new String[1]; long db = open(mPath, flags, codes, errors); if (codes[0] != SQLITE_OK || db == 0 || errors[0] != null) { throw new SQLiteException(codes[0], errors[0]); } mDB = db; setTempDir(mTempDir); } public void setTempDirectory(String tempDir) { if (isOpen()) { setTempDir(tempDir); } mTempDir = tempDir; } public Statement prepareStatement(String sql) throws SQLiteException { int[] prepareCode = new int[1]; int rc; long stmt = prepare(mDB, sql, prepareCode); rc = prepareCode[0]; if (rc != SQLITE_OK || stmt == 0) { throw new SQLiteException(rc, 
lastErrorMessage(mDB), sql); } return new Statement(stmt); } public void execute(String sql) throws SQLiteException { String[] errors = new String[1]; int rc = exec(mDB, sql, errors); if (rc != SQLITE_OK) { throw new SQLiteException(rc, errors[0], sql); } notifyListenerOnExecuteQuery(sql); } public boolean executeUpdate(String sql) throws SQLiteException { return executeUpdate(sql, (Object[]) null); } public boolean executeUpdate(String sql, Map<String, Object> arguments) throws SQLiteException { return executeUpdate(sql, null, arguments); } public boolean executeUpdate(String sql, Object... arguments) throws SQLiteException { return executeUpdate(sql, arguments == null ? null : asList(arguments)); } public boolean executeUpdate(String sql, List<Object> arguments) throws SQLiteException { return executeUpdate(sql, arguments, null); } public ResultSet executeQuery(String sql) throws SQLiteException { return executeQuery(sql, (Object[]) null); } public ResultSet executeQuery(String sql, Map<String, Object> arguments) throws SQLiteException { return executeQuery(sql, null, arguments); } public ResultSet executeQuery(String sql, Object... arguments) throws SQLiteException { return executeQuery(sql, arguments == null ? null : asList(arguments)); } public ResultSet executeQuery(String sql, List<Object> arguments) throws SQLiteException { return executeQuery(sql, arguments, null); } public long getLastInsertId() { return lastInsertId(mDB); } public String getLastErrorMessage() { return lastErrorMessage(mDB); } /* Use with native code. 
sqlite3 *db = reinterpret_cast<sqlite3*>(jdb); */ public long getSQLiteHandler() { return mDB; } public boolean close() throws SQLiteException { int rc = close(mDB); if (rc != SQLITE_OK) { throw new SQLiteException(rc, lastErrorMessage(mDB)); } mDB = 0; return true; } public boolean isClosed() { return mDB == 0; } public boolean isOpen() { return mDB > 0; } public String getDatabasePath() { return mPath; } //endregion //region Private private boolean executeUpdate(String sql, List<Object> listArgs, Map<String, Object> mapArgs) throws SQLiteException { Statement statement = compileStatement(sql, listArgs, mapArgs); int rc = statement.step(); statement.close(); if (rc != SQLITE_DONE) { throw new SQLiteException(rc, getLastErrorMessage(), sql); } notifyListenerOnExecuteQuery(sql); return true; } private ResultSet executeQuery(String query, List<Object> listArgs, Map<String, Object> mapArgs) throws SQLiteException { ResultSet rs = new ResultSet(this, compileStatement(query, listArgs, mapArgs)); notifyListenerOnExecuteQuery(query); return rs; } private void notifyListenerOnExecuteQuery(String sql) { if (mDatabaseListener != null) { mDatabaseListener.onExecuteQuery(sql); } } private Statement compileStatement(String query, List<Object> listArgs, Map<String, Object> mapArgs) throws SQLiteException { Statement statement = prepareStatement(query); int rc, index = 1, bindsCount = statement.getBindParameterCount() + 1; if (mapArgs != null && mapArgs.size() > 0) { String parameterName; int parameterIndex; for (String key : mapArgs.keySet()) { parameterName = ":" + key; if ((parameterIndex = statement.getParameterIndex(parameterName)) > 0) { statement.bindObject(parameterIndex, mapArgs.get(key)); index++; } else { throw new SQLiteException(SQLITE_MISUSE, "Parameter index not found for name " + key + "'", query); } } } else if (listArgs != null && listArgs.size() > 0) { for (Object object : listArgs) { statement.bindObject(index++, object); } } if (index != bindsCount) { rc = 
statement.close(); if (rc != SQLITE_OK) { throw new SQLiteException(rc, lastErrorMessage(mDB)); } throw new SQLiteException(SQLITE_MISUSE, "The bind count is not correct for the number of variables", query); } return statement; } //endregion //region Getters and Setters public DatabaseListener getDatabaseListener() { return mDatabaseListener; } public void setDatabaseListener(DatabaseListener databaseListener) { mDatabaseListener = databaseListener; } //endregion public interface DatabaseListener { public void onExecuteQuery(String query); } }
package net.domesdaybook.expression.parser;

import java.util.LinkedHashSet;
import java.util.Set;

import net.domesdaybook.matcher.singlebyte.ByteUtilities;

import org.antlr.runtime.tree.CommonTree;
import org.antlr.runtime.tree.Tree;

/**
 * A utility class of static helper methods to use when parsing expressions.
 *
 * @author Matt Palmer
 */
public class ParseUtils {

    public static final String TYPE_ERROR = "Type [%s] not supported by the compiler.";
    public static final String QUOTE = "\'";

    // Utility class: not instantiable.
    private ParseUtils() {
    }

    /**
     * Returns a byte from its hexadecimal string representation.
     *
     * @param hexByte a hexadecimal representation of a byte.
     * @return the byte encoded by the hex representation.
     */
    public static byte parseHexByte(final String hexByte) {
        return (byte) Integer.parseInt(hexByte, 16);
    }

    /**
     * Returns a byte from a parse-tree node containing a byte value.
     *
     * @param treeNode The parse-tree node to extract the byte value from.
     * @return The byte encoded by the parse-tree node.
     */
    public static byte getHexByteValue(final Tree treeNode) {
        return parseHexByte(treeNode.getText());
    }

    /**
     * Returns the byte value of a bitmask-type tree node.
     * The mask value is stored as hex text in the node's first child.
     *
     * @param treeNode The parse-tree node to extract the bitmask value from.
     * @return The byte value of the bitmask in the parse-tree.
     */
    public static byte getBitMaskValue(final Tree treeNode) {
        final Tree childNode = treeNode.getChild(0);
        return parseHexByte(childNode.getText());
    }

    /**
     * Returns an integer value of the specified child of the parse-tree node.
     * The integer must be encoded in base-10, not hexadecimal or any other base.
     *
     * @param treeNode The parent node from whose children we want to extract an integer value.
     * @param childIndex The index of the child to extract the integer from.
     * @return The integer value of the specified child of the parse-tree node.
     */
    public static int getChildIntValue(final Tree treeNode, final int childIndex) {
        final Tree childNode = treeNode.getChild(childIndex);
        return Integer.parseInt(childNode.getText(), 10);
    }

    /**
     * Returns a string value of the specified child of the parse-tree node.
     *
     * @param treeNode The parent node from whose children we want to extract a string value.
     * @param childIndex The index of the child to extract the string from.
     * @return The string value of the specified child of the parse-tree node.
     */
    public static String getChildStringValue(final Tree treeNode, final int childIndex) {
        return treeNode.getChild(childIndex).getText();
    }

    /**
     * Gets the minimum repeat value of a repeat node in a parse-tree.
     *
     * @param treeNode the repeat node in the parse-tree.
     * @return The minimum repeat value of the repeat node.
     */
    public static int getMinRepeatValue(final Tree treeNode) {
        return getChildIntValue(treeNode, 0);
    }

    /**
     * Gets the maximum repeat value of a repeat node in a parse-tree.
     *
     * @param treeNode the repeat node in the parse-tree.
     * @return The maximum repeat value of the repeat node.
     */
    public static int getMaxRepeatValue(final Tree treeNode) {
        return getChildIntValue(treeNode, 1);
    }

    /**
     * Gets the node which must be repeated in the parse-tree under a
     * parent repeat-node.
     *
     * @param treeNode the node to repeat in a repeat node.
     * @return The node which needs to be repeated under a parent repeat node.
     */
    public static Tree getRepeatNode(final Tree treeNode) {
        return treeNode.getChild(2);
    }

    /**
     * Calculates the set of byte values denoted by a set node (or inverted set
     * node). Sets can contain bytes, strings (case sensitive &amp; insensitive),
     * ranges, other sets nested inside them (both normal and inverted) and
     * bitmasks.
     *
     * This can be a recursive procedure if sets are nested within one another.
     *
     * @param node The set node to calculate a set of byte values for.
     * @return The set of byte values the node matches.
     * @throws ParseException if a child node type is not supported.
     */
    public static Set<Byte> calculateSetValue(final CommonTree node) throws ParseException {
        final Set<Byte> setValues = new LinkedHashSet<Byte>(320);
        for (int childIndex = 0, stop = node.getChildCount(); childIndex < stop; childIndex++) {
            final CommonTree childNode = (CommonTree) node.getChild(childIndex);
            switch (childNode.getType()) {

                // Recursively build if we have nested child sets:
                case regularExpressionParser.SET: {
                    final Set<Byte> nestedSetValues = calculateSetValue(childNode);
                    setValues.addAll(nestedSetValues);
                    break;
                }

                case regularExpressionParser.INVERTED_SET: {
                    final Set<Byte> nestedSetValues = calculateSetValue(childNode);
                    setValues.addAll(inverseOf(nestedSetValues));
                    break;
                }

                // non recursive: just build values:
                case regularExpressionParser.BYTE: {
                    setValues.add(ParseUtils.getHexByteValue(childNode));
                    break;
                }

                case regularExpressionParser.ALL_BITMASK: {
                    final byte allBitMask = ParseUtils.getBitMaskValue(childNode);
                    setValues.addAll(ByteUtilities.getBytesMatchingAllBitMask(allBitMask));
                    break;
                }

                case regularExpressionParser.ANY_BITMASK: {
                    final byte allBitMask = ParseUtils.getBitMaskValue(childNode);
                    setValues.addAll(ByteUtilities.getBytesMatchingAnyBitMask(allBitMask));
                    break;
                }

                case regularExpressionParser.RANGE: {
                    // Range endpoints are either quoted single characters or hex bytes.
                    int minRangeValue;
                    int maxRangeValue;
                    String minRange = ParseUtils.getChildStringValue(childNode, 0);
                    String maxRange = ParseUtils.getChildStringValue(childNode, 1);
                    if (minRange.startsWith(QUOTE)) {
                        minRangeValue = (int) minRange.charAt(1);
                    } else {
                        minRangeValue = Integer.parseInt(minRange, 16);
                    }
                    if (maxRange.startsWith(QUOTE)) {
                        maxRangeValue = (int) maxRange.charAt(1);
                    } else {
                        maxRangeValue = Integer.parseInt(maxRange, 16);
                    }
                    // Normalize a reversed range so iteration always terminates.
                    if (minRangeValue > maxRangeValue) {
                        int swapTemp = minRangeValue;
                        minRangeValue = maxRangeValue;
                        maxRangeValue = swapTemp;
                    }
                    for (int rangeValue = minRangeValue; rangeValue <= maxRangeValue; rangeValue++) {
                        setValues.add((byte) rangeValue);
                    }
                    break;
                }

                case regularExpressionParser.CASE_SENSITIVE_STRING: {
                    final String stringValue = trimString(childNode.getText());
                    for (int charIndex = 0; charIndex < stringValue.length(); charIndex++) {
                        final char charAt = stringValue.charAt(charIndex);
                        setValues.add((byte) charAt);
                    }
                    break;
                }

                case regularExpressionParser.CASE_INSENSITIVE_STRING: {
                    // Add each character plus its opposite-case twin for ASCII letters.
                    final String stringValue = trimString(childNode.getText());
                    for (int charIndex = 0; charIndex < stringValue.length(); charIndex++) {
                        final char charAt = stringValue.charAt(charIndex);
                        if (charAt >= 'a' && charAt <= 'z') {
                            setValues.add((byte) Character.toUpperCase(charAt));
                        // BUG FIX: was (charAt >= 'A' && charAt <= 'A'), which only
                        // matched the single letter 'A', so uppercase letters never
                        // contributed their lowercase counterpart to the set.
                        } else if (charAt >= 'A' && charAt <= 'Z') {
                            setValues.add((byte) Character.toLowerCase(charAt));
                        }
                        setValues.add((byte) charAt);
                    }
                    break;
                }

                default: {
                    final String message = String.format(TYPE_ERROR, getTokenName(childNode));
                    throw new ParseException(message);
                }
            }
        }
        return setValues;
    }

    /**
     * Returns a set of bytes which contains the inverse of the set of bytes
     * passed in. All the bytes which were not in the original set will be
     * present, and all the bytes which were will not be.
     *
     * @param byteSet The set of bytes to invert.
     * @return The inverse of the set of bytes passed in.
     */
    public static Set<Byte> inverseOf(final Set<Byte> byteSet) {
        final Set<Byte> inverseSet = new LinkedHashSet<Byte>();
        for (int value = 0; value < 256; value++) {
            if (!byteSet.contains((byte) value)) {
                inverseSet.add((byte) value);
            }
        }
        return inverseSet;
    }

    /**
     * Removes the leading and trailing character from a string.
     * This is used to remove quotes from quoted strings.
     *
     * @param str The string to trim.
     * @return A string without the first and last character.
     */
    public static String trimString(final String str) {
        return str.substring(1, str.length() - 1);
    }

    /**
     * Gets the string name of the type of a parse-tree node.
     *
     * @param node The node to get the type name of.
     * @return The type name of the parse tree node.
     */
    public static String getTokenName(final CommonTree node) {
        return regularExpressionParser.tokenNames[node.getType()];
    }

    /**
     * Returns a "type not supported" error message for a parse-tree node.
     *
     * @param node The node to return an error message for.
     * @return A type not supported error message for the node.
     */
    public static String getTypeErrorMessage(final CommonTree node) {
        return String.format(TYPE_ERROR, getTokenName(node));
    }
}
package SW9.controllers; import SW9.NewMain; import SW9.abstractions.Component; import SW9.abstractions.Edge; import SW9.abstractions.Location; import SW9.backend.UPPAALDriver; import SW9.presentations.CanvasPresentation; import SW9.presentations.LocationPresentation; import SW9.utility.UndoRedoStack; import SW9.utility.colors.Color; import SW9.utility.helpers.BindingHelper; import SW9.utility.helpers.SelectHelperNew; import SW9.utility.keyboard.Keybind; import SW9.utility.keyboard.KeyboardTracker; import com.jfoenix.controls.JFXTextField; import javafx.beans.property.ObjectProperty; import javafx.beans.property.SimpleObjectProperty; import javafx.fxml.FXML; import javafx.fxml.Initializable; import javafx.scene.Cursor; import javafx.scene.Group; import javafx.scene.control.Label; import javafx.scene.control.TextArea; import javafx.scene.input.KeyCode; import javafx.scene.input.KeyCodeCombination; import javafx.scene.input.MouseEvent; import javafx.scene.layout.StackPane; import javafx.scene.shape.Circle; import javafx.scene.shape.Path; import javafx.scene.shape.Rectangle; import java.net.URL; import java.util.ResourceBundle; import java.util.Timer; import java.util.TimerTask; import java.util.concurrent.atomic.AtomicInteger; public class LocationController implements Initializable, SelectHelperNew.ColorSelectable { private static final AtomicInteger hiddenLocationID = new AtomicInteger(0); private static final long DOUBLE_PRESS_SHOW_PROPERTIES_DELAY = 500; private final ObjectProperty<Location> location = new SimpleObjectProperty<>(); private final ObjectProperty<Component> component = new SimpleObjectProperty<>(); public Group root; public Circle initialIndicator; public StackPane finalIndicator; public Group shakeContent; public Label nameLabel; public Rectangle rectangle; public Rectangle rectangleShakeIndicator; public Circle circle; public Circle circleShakeIndicator; public Path octagon; public Path octagonShakeIndicator; public StackPane propertiesPane; 
public JFXTextField nameField; public TextArea invariantField; private boolean isPlaced; private long lastPress = 0; private TimerTask reachabilityCheckTask; @Override public void initialize(final URL location, final ResourceBundle resources) { this.location.addListener((obsLocation, oldLocation, newLocation) -> { // The radius property on the abstraction must reflect the radius in the view newLocation.radiusProperty().bind(circle.radiusProperty()); // The scale property on the abstraction must reflect the radius in the view newLocation.scaleProperty().bind(root.scaleXProperty()); // initialize the name field and its bindings nameField.setText(newLocation.getName()); newLocation.nameProperty().bind(nameField.textProperty()); // initialize the invariant field and its bindings invariantField.setText(newLocation.getInvariant()); newLocation.invariantProperty().bind(invariantField.textProperty()); // If the location is not a normal location (not initial/final) make it draggable if(newLocation.getType() == Location.Type.NORMAL) { root.setOnMouseDragged(event -> { root.setLayoutX(CanvasPresentation.mouseTracker.gridXProperty().subtract(getComponent().xProperty()).doubleValue()); root.setLayoutY(CanvasPresentation.mouseTracker.gridYProperty().subtract(getComponent().yProperty()).doubleValue()); }); } }); // Scale x and y 1:1 (based on the x-scale) root.scaleYProperty().bind(root.scaleXProperty()); // Register click listener on canvas to hide the property pane when the canvas is clicked CanvasPresentation.mouseTracker.registerOnMouseClickedEventHandler(event -> propertiesPane.setVisible(false)); // Register a key-bind for hiding the property pane (using a hidden locationID) KeyboardTracker.registerKeybind(KeyboardTracker.HIDE_LOCATION_PROPERTY_PANE + hiddenLocationID.getAndIncrement(), new Keybind(new KeyCodeCombination(KeyCode.ESCAPE), () -> { propertiesPane.setVisible(false); })); initializeReachabilityCheck(); } public void initializeReachabilityCheck() { final int 
interval = 5000; // Could not run query reachabilityCheckTask = new TimerTask() { @Override public void run() { if (getComponent() == null || getLocation() == null) return; // The location might have been remove from the component (through ctrl + z) if (getLocation().getType() == Location.Type.NORMAL && !getComponent().getLocations().contains(getLocation())) return; UPPAALDriver.verify( "E<> " + getComponent().getName() + "." + getLocation().getName(), result -> { final LocationPresentation locationPresentation = (LocationPresentation) LocationController.this.root; locationPresentation.animateShakeWarning(!result); }, e -> { System.out.println("hsj"); System.out.println(e); // Could not run query }, NewMain.getProject().getComponents() ); } }; new Timer().schedule(reachabilityCheckTask, 0, interval); } public Location getLocation() { return location.get(); } public void setLocation(final Location location) { this.location.set(location); if (location.getType().equals(Location.Type.NORMAL)) { root.layoutXProperty().bind(location.xProperty()); root.layoutYProperty().bind(location.yProperty()); } else { location.xProperty().bind(root.layoutXProperty()); location.yProperty().bind(root.layoutYProperty()); isPlaced = true; } } public ObjectProperty<Location> locationProperty() { return location; } public Component getComponent() { return component.get(); } public void setComponent(final Component component) { this.component.set(component); } public ObjectProperty<Component> componentProperty() { return component; } @FXML private void mouseEntered() { circle.setCursor(Cursor.HAND); ((LocationPresentation) root).animateHoverEntered(); // Keybind for making location urgent KeyboardTracker.registerKeybind(KeyboardTracker.MAKE_LOCATION_URGENT, new Keybind(new KeyCodeCombination(KeyCode.U), () -> { final Location.Urgency previousUrgency = location.get().getUrgency(); if (previousUrgency.equals(Location.Urgency.URGENT)) { UndoRedoStack.push(() -> { // Perform 
getLocation().setUrgency(Location.Urgency.NORMAL); }, () -> { // Undo getLocation().setUrgency(previousUrgency); }); } else { UndoRedoStack.push(() -> { // Perform getLocation().setUrgency(Location.Urgency.URGENT); }, () -> { // Undo getLocation().setUrgency(previousUrgency); }); } })); // Keybind for making location committed KeyboardTracker.registerKeybind(KeyboardTracker.MAKE_LOCATION_COMMITTED, new Keybind(new KeyCodeCombination(KeyCode.C), () -> { final Location.Urgency previousUrgency = location.get().getUrgency(); if (previousUrgency.equals(Location.Urgency.COMMITTED)) { UndoRedoStack.push(() -> { // Perform getLocation().setUrgency(Location.Urgency.NORMAL); }, () -> { // Undo getLocation().setUrgency(previousUrgency); }); } else { UndoRedoStack.push(() -> { // Perform getLocation().setUrgency(Location.Urgency.COMMITTED); }, () -> { // Undo getLocation().setUrgency(previousUrgency); }); } })); } @FXML private void mouseExited() { circle.setCursor(Cursor.DEFAULT); ((LocationPresentation) root).animateHoverExited(); KeyboardTracker.unregisterKeybind(KeyboardTracker.MAKE_LOCATION_URGENT); KeyboardTracker.unregisterKeybind(KeyboardTracker.MAKE_LOCATION_COMMITTED); } @FXML private void mouseClicked(final MouseEvent event) { event.consume(); // Double clicking the location opens the properties pane if(lastPress + DOUBLE_PRESS_SHOW_PROPERTIES_DELAY >= System.currentTimeMillis()) { propertiesPane.setVisible(true); // Place the location in front (so that the properties pane is above edges etc) root.toFront(); } else { lastPress = System.currentTimeMillis(); } } @FXML private void mousePressed(final MouseEvent event) { final Component component = getComponent(); event.consume(); if (isPlaced) { final Edge unfinishedEdge = component.getUnfinishedEdge(); if (unfinishedEdge != null) { unfinishedEdge.setTargetLocation(getLocation()); } else { // If shift is being held down, start drawing a new edge if (event.isShiftDown()) { final Edge newEdge = new Edge(getLocation()); 
UndoRedoStack.push(() -> { // Perform component.addEdge(newEdge); }, () -> { // Undo component.removeEdge(newEdge); }); } // Otherwise, select the location else { SelectHelperNew.select(this); } } } else { /*subject.xProperty().unbind(); subject.yProperty().unbind(); subject.xProperty().set(CanvasPresentation.mouseTracker.gridXProperty().subtract(x).get()); subject.yProperty().set(CanvasPresentation.mouseTracker.gridYProperty().subtract(y).get());*/ // Unbind presentation root x and y coordinates (bind the view properly to enable dragging) root.layoutXProperty().unbind(); root.layoutYProperty().unbind(); // Bind the location to the presentation root x and y getLocation().xProperty().bind(root.layoutXProperty()); getLocation().yProperty().bind(root.layoutYProperty()); isPlaced = true; } } @Override public void color(final Color color, final Color.Intensity intensity) { final Location location = getLocation(); // Set the color of the location location.setColorIntensity(intensity); location.setColor(color); } @Override public Color getColor() { return getLocation().getColor(); } @Override public Color.Intensity getColorIntensity() { return getLocation().getColorIntensity(); } @Override public void select() { ((SelectHelperNew.Selectable) root).select(); } @Override public void deselect() { ((SelectHelperNew.Selectable) root).deselect(); } }
package com.jed.actor; import org.lwjgl.opengl.GL11; import com.jed.util.Vector; /** * * @author jlinde, Peter Colapietro * */ public class PolygonBoundary extends Boundary { private double rightBound = 0; private double leftBound = 0; private double upperBound = 0; private double lowerBound = 0; /** * * @param position position vector * @param verticies array of vertices */ public PolygonBoundary(Vector position, Vector[] verticies) { super(position, verticies); //Find Max Bounds for quad tree for (Vector each : verticies) { if (each.x > rightBound) rightBound = each.x; if (each.x < leftBound) leftBound = each.x; if (each.y < upperBound) upperBound = each.y; if (each.y > lowerBound) lowerBound = each.y; } } @Override public double getRightBound() { return owner.position.x + position.x + rightBound; } @Override public double getLeftBound() { return owner.position.x + position.x + leftBound; } @Override public double getUpperBound() { return owner.position.y + position.y + upperBound; } @Override public double getLowerBound() { return owner.position.y + position.y + lowerBound; } @Override public int getWidth() { return (int) (rightBound - leftBound); } @Override public int getHeight() { return (int) (lowerBound - upperBound); } @Override public void draw() { //Bounding Box GL11.glColor3f(1f, 0, 0); GL11.glBegin(GL11.GL_LINE_LOOP); for (Vector each : verticies) { owner.drawChildVertex2f(position.x + each.x, position.y + each.y); } GL11.glEnd(); } }
package backtype.hadoop.pail;

import backtype.hadoop.formats.RecordOutputStream;
import backtype.support.Utils;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.RecordWriter;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.util.Progressable;
import org.apache.log4j.Logger;
import org.apache.hadoop.mapred.FileOutputCommitter;

/**
 * Hadoop (old mapred API) output format that writes key/value records into a
 * Pail. The record key ({@link Text}) selects the attribute (subdirectory)
 * inside the pail; the value ({@link BytesWritable}) is written raw.
 */
public class PailOutputFormat extends FileOutputFormat<Text, BytesWritable> {
    public static Logger LOG = Logger.getLogger(PailOutputFormat.class);

    // JobConf key under which the serialized PailSpec is passed to tasks.
    public static final String SPEC_ARG = "pail_spec_arg";

    // we limit the size of outputted files because of s3 file limits
    public static final long FILE_LIMIT_SIZE_BYTES = 1L * 1024 * 1024 * 1024; // 1GB

    /**
     * Change this to just use Pail#writeObject - automatically fix up BytesWritable
     */
    public static class PailRecordWriter implements RecordWriter<Text, BytesWritable> {
        private Pail _pail;      // pail rooted at this task's work directory
        private String _unique;  // per-task unique filename prefix

        // Bookkeeping for one currently-open output file of a given attribute.
        protected static class OpenAttributeFile {
            public String attr;
            public String filename;
            public RecordOutputStream os;
            public long numBytesWritten = 0; // used to roll files at FILE_LIMIT_SIZE_BYTES

            public OpenAttributeFile(String attr, String filename, RecordOutputStream os) {
                this.attr = attr;
                this.filename = filename;
                this.os = os;
            }
        }

        // One open file per attribute seen so far.
        private Map<String, OpenAttributeFile> _outputters = new HashMap<String, OpenAttributeFile>();
        private int writtenRecords = 0; // total records written, for periodic logging
        private int numFilesOpened = 0; // suffix that keeps rolled filenames unique

        public PailRecordWriter(JobConf conf, String unique, Progressable p) throws IOException {
            PailSpec spec = (PailSpec) Utils.getObject(conf, SPEC_ARG);
            // Ensure the final output pail exists with the requested spec.
            Pail.create(getOutputPath(conf).toString(), spec, false);
            // this is a hack to get the work output directory since it's not exposed
            // directly. instead it only provides a path to a particular file.
            _pail = Pail.create(FileOutputFormat.getTaskOutputPath(conf, unique).getParent().toString(), spec, false);
            _unique = unique;
        }

        /**
         * Writes one record into the file for attribute {@code k}, rolling to a
         * new file once the current one reaches FILE_LIMIT_SIZE_BYTES.
         */
        public void write(Text k, BytesWritable v) throws IOException {
            String attr = k.toString();
            OpenAttributeFile oaf = _outputters.get(attr);
            // Roll the file when it has grown past the size limit.
            if(oaf!=null && oaf.numBytesWritten >= FILE_LIMIT_SIZE_BYTES) {
                closeAttributeFile(oaf);
                oaf = null;
                _outputters.remove(attr);
            }
            if(oaf==null) {
                String filename;
                // Empty attribute means the pail root rather than a subdirectory.
                if(!attr.isEmpty()) {
                    filename = attr + "/" + _unique + numFilesOpened;
                } else {
                    filename = _unique + numFilesOpened;
                }
                numFilesOpened++;
                LOG.info("Opening " + filename + " for attribute " + attr);
                //need overwrite for situations where regular FileOutputCommitter isn't used (like S3)
                oaf = new OpenAttributeFile(attr, filename, _pail.openWrite(filename, true));
                _outputters.put(attr, oaf);
            }
            oaf.os.writeRaw(v.getBytes(), 0, v.getLength());
            oaf.numBytesWritten+=v.getLength();
            logProgress();
        }

        // Logs per-attribute byte counts every 100k records.
        protected void logProgress() {
            writtenRecords++;
            if(writtenRecords%100000 == 0) {
                for(OpenAttributeFile oaf: _outputters.values()) {
                    LOG.info("Attr:" + oaf.attr + " Filename:" + oaf.filename + " Bytes written:" + oaf.numBytesWritten);
                }
            }
        }

        protected void closeAttributeFile(OpenAttributeFile oaf) throws IOException {
            LOG.info("Closing " + oaf.filename + " for attr " + oaf.attr);
            //print out the size of the file here
            oaf.os.close();
            LOG.info("Closed " + oaf.filename + " for attr " + oaf.attr);
        }

        /** Closes every open attribute file, reporting progress between closes. */
        public void close(Reporter rprtr) throws IOException {
            for(String key: _outputters.keySet()) {
                closeAttributeFile(_outputters.get(key));
                rprtr.progress();
            }
            _outputters.clear();
        }
    }

    public RecordWriter<Text, BytesWritable> getRecordWriter(FileSystem ignored, JobConf jc, String string, Progressable p) throws IOException {
        return new PailRecordWriter(jc, string, p);
    }

    @Override
    public void checkOutputSpecs(FileSystem fs, JobConf conf) throws IOException {
        // because this outputs multiple files, doesn't work with speculative
        // execution on something like EMR with S3
        if(!(conf.getOutputCommitter() instanceof FileOutputCommitter)) {
            // NOTE(review): && binds tighter than ||, so this reads as
            // (mapSpec && reduces==0) || reduceSpec — verify that grouping is
            // the intended condition.
            if(conf.getMapSpeculativeExecution() && conf.getNumReduceTasks()==0 || conf.getReduceSpeculativeExecution()) {
                throw new IllegalArgumentException("Cannot use speculative execution with PailOutputFormat unless FileOutputCommitter is enabled");
            }
        }
    }
}
package com.perimeterx.api; import com.perimeterx.api.activities.ActivityHandler; import com.perimeterx.api.activities.BufferedActivityHandler; import com.perimeterx.api.providers.CombinedIPProvider; import com.perimeterx.api.providers.DefaultHostnameProvider; import com.perimeterx.api.providers.HostnameProvider; import com.perimeterx.api.providers.IPProvider; import com.perimeterx.api.proxy.DefaultReverseProxy; import com.perimeterx.api.proxy.ReverseProxy; import com.perimeterx.api.remoteconfigurations.DefaultRemoteConfigManager; import com.perimeterx.api.remoteconfigurations.RemoteConfigurationManager; import com.perimeterx.api.remoteconfigurations.TimerConfigUpdater; import com.perimeterx.api.verificationhandler.DefaultVerificationHandler; import com.perimeterx.api.verificationhandler.TestVerificationHandler; import com.perimeterx.api.verificationhandler.VerificationHandler; import com.perimeterx.http.PXHttpClient; import com.perimeterx.internals.PXCookieValidator; import com.perimeterx.internals.PXS2SValidator; import com.perimeterx.models.PXContext; import com.perimeterx.models.activities.UpdateReason; import com.perimeterx.models.configuration.PXConfiguration; import com.perimeterx.models.configuration.PXDynamicConfiguration; import com.perimeterx.models.exceptions.PXException; import com.perimeterx.models.risk.PassReason; import com.perimeterx.models.risk.S2SErrorReason; import com.perimeterx.utils.PXLogger; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponseWrapper; import java.io.IOException; import java.net.URISyntaxException; public class PerimeterX { private static final PXLogger logger = PXLogger.getLogger(PerimeterX.class); private PXConfiguration configuration; private PXS2SValidator serverValidator; private PXCookieValidator cookieValidator; private ActivityHandler activityHandler; private IPProvider ipProvider; private HostnameProvider hostnameProvider; private VerificationHandler verificationHandler; private 
ReverseProxy reverseProxy; private void init(PXConfiguration configuration) throws PXException { logger.debug(PXLogger.LogReason.DEBUG_INITIALIZING_MODULE); configuration.mergeConfigurations(); this.configuration = configuration; hostnameProvider = new DefaultHostnameProvider(); ipProvider = new CombinedIPProvider(configuration); PXHttpClient pxClient = new PXHttpClient(configuration); this.activityHandler = new BufferedActivityHandler(pxClient, this.configuration); if (configuration.isRemoteConfigurationEnabled()) { RemoteConfigurationManager remoteConfigManager = new DefaultRemoteConfigManager(configuration, pxClient); PXDynamicConfiguration initialConfig = remoteConfigManager.getConfiguration(); if (initialConfig == null) { remoteConfigManager.disableModuleOnError(); } else { remoteConfigManager.updateConfiguration(initialConfig); } TimerConfigUpdater timerConfigUpdater = new TimerConfigUpdater(remoteConfigManager, configuration, activityHandler); timerConfigUpdater.schedule(); } this.serverValidator = new PXS2SValidator(pxClient, this.configuration); this.cookieValidator = new PXCookieValidator(this.configuration); setVerificationHandler(); this.activityHandler.handleEnforcerTelemetryActivity(configuration, UpdateReason.INIT); this.reverseProxy = new DefaultReverseProxy(configuration, ipProvider); } private void setVerificationHandler() { if (this.configuration.isTestingMode()) { this.verificationHandler = new TestVerificationHandler(this.configuration, this.activityHandler); } else { this.verificationHandler = new DefaultVerificationHandler(this.configuration, this.activityHandler); } } public PerimeterX(PXConfiguration configuration) throws PXException { init(configuration); } public PerimeterX(PXConfiguration configuration, IPProvider ipProvider, HostnameProvider hostnameProvider) throws PXException { init(configuration); this.ipProvider = ipProvider; this.hostnameProvider = hostnameProvider; } public PerimeterX(PXConfiguration configuration, IPProvider 
ipProvider) throws PXException { init(configuration); this.ipProvider = ipProvider; } public PerimeterX(PXConfiguration configuration, HostnameProvider hostnameProvider) throws PXException { init(configuration); this.hostnameProvider = hostnameProvider; } /** * Verify http request using cookie or PX server call * * @param req - current http call examined by PX * @param responseWrapper - response wrapper on which we will set the response according to PX verification. * @return PXContext, or null if module is disabled * @throws PXException - PXException */ public PXContext pxVerify(HttpServletRequest req, HttpServletResponseWrapper responseWrapper) throws PXException { PXContext context = null; logger.debug(PXLogger.LogReason.DEBUG_STARTING_REQUEST_VERIFICATION); try { if (!moduleEnabled()) { logger.debug(PXLogger.LogReason.DEBUG_MODULE_DISABLED); return null; } context = new PXContext(req, this.ipProvider, this.hostnameProvider, configuration); if (shouldReverseRequest(req, responseWrapper)) { context.setFirstPartyRequest(true); return context; } //if path ext is defined at whitelist, let the request pass if(configuration.isExtWhiteListed(req.getRequestURI())) { return null; } handleCookies(context); context.setVerified(verificationHandler.handleVerification(context, responseWrapper)); } catch (Exception e) { logger.debug(PXLogger.LogReason.ERROR_COOKIE_EVALUATION_EXCEPTION, e.getMessage()); // If any general exception is being thrown, notify in page_request activity if (context != null) { context.setPassReason(PassReason.S2S_ERROR); if (context.getS2sErrorReason() == S2SErrorReason.NO_ERROR) { context.setS2SErrorInfo(S2SErrorReason.UNKNOWN_ERROR, e.toString(), -1, null); } activityHandler.handlePageRequestedActivity(context); context.setVerified(true); } } return context; } private void handleCookies(PXContext context) { if (cookieValidator.verify(context)) { logger.debug(PXLogger.LogReason.DEBUG_COOKIE_EVALUATION_FINISHED, context.getRiskScore()); // Cookie is 
valid (exists and not expired) so we can block according to it's score return; } logger.debug(PXLogger.LogReason.DEBUG_COOKIE_MISSING); if (serverValidator.verify(context)) { logger.debug(PXLogger.LogReason.DEBUG_COOKIE_VERSION_FOUND, context.getCookieVersion()); } } private boolean shouldReverseRequest(HttpServletRequest req, HttpServletResponseWrapper res) throws IOException, URISyntaxException { return reverseProxy.reversePxClient(req, res) || reverseProxy.reversePxXhr(req, res) || reverseProxy.reverseCaptcha(req, res); } private boolean moduleEnabled() { return this.configuration.isModuleEnabled(); } /** * Set activity handler * * @param activityHandler - new activity handler to use */ public void setActivityHandler(ActivityHandler activityHandler) { this.activityHandler = activityHandler; setVerificationHandler(); } /** * Set IP Provider * * @param ipProvider - IP provider that is used to extract ip from request */ public void setIpProvider(IPProvider ipProvider) { this.ipProvider = ipProvider; } /** * Set Hostname Provider * * @param hostnameProvider - Used to extract hostname from request */ public void setHostnameProvider(HostnameProvider hostnameProvider) { this.hostnameProvider = hostnameProvider; } /** * Set Set Verification Handler * * @param verificationHandler - sets the verification handler for user customization */ public void setVerificationHandler(VerificationHandler verificationHandler) { this.verificationHandler = verificationHandler; } }
package com.pump.showcase;

import java.awt.Dimension;
import java.awt.Font;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.image.BufferedImage;
import java.net.URL;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.SortedSet;
import java.util.TreeSet;

import javax.swing.AbstractButton;
import javax.swing.ImageIcon;
import javax.swing.JButton;
import javax.swing.JCheckBox;
import javax.swing.JComboBox;
import javax.swing.JLabel;
import javax.swing.JTextField;
import javax.swing.LookAndFeel;
import javax.swing.SwingConstants;
import javax.swing.UIDefaults;
import javax.swing.UIManager;
import javax.swing.UIManager.LookAndFeelInfo;
import javax.swing.event.DocumentEvent;
import javax.swing.event.DocumentListener;
import javax.swing.plaf.ButtonUI;

import com.pump.icon.RefreshIcon;
import com.pump.image.ImageLoader;
import com.pump.image.pixel.Scaling;
import com.pump.inspector.Inspector;
import com.pump.plaf.BevelButtonUI;
import com.pump.plaf.CapsuleButtonUI;
import com.pump.plaf.GradientButtonUI;
import com.pump.plaf.PlasticButtonUI;
import com.pump.plaf.QButtonUI;
import com.pump.plaf.RecessedButtonUI;
import com.pump.plaf.RetroButtonUI;
import com.pump.plaf.RoundRectButtonUI;
import com.pump.plaf.SquareButtonUI;
import com.pump.plaf.TexturedButtonUI;
import com.pump.plaf.VistaButtonUI;
import com.pump.plaf.XPButtonUI;
import com.pump.plaf.XPSubtleButtonUI;
import com.pump.reflect.Reflection;
import com.pump.swing.FontComboBox;
import com.pump.util.JVM;

/**
 * Showcase demo that renders a single configurable JButton. Every inspector
 * control (ButtonUI class, font, icon, text, alignment, paint flags, and —
 * on Mac — Aqua client properties) rebuilds the preview button via
 * {@link #refreshButton()}.
 */
public class JButtonDemo extends ShowcaseExampleDemo {
	private static final long serialVersionUID = 1L;

	// Sentinel item in the Aqua "Button Type" combobox meaning "no client property".
	private static final String NONE = "none";

	/**
	 * Horizontal alignment/text-position options, mapping enum names to the
	 * corresponding SwingConstants int values.
	 */
	public enum Horizontal {
		Right(SwingConstants.RIGHT), Left(SwingConstants.LEFT), Center(
				SwingConstants.CENTER), Leading(SwingConstants.LEADING), Trailing(
				SwingConstants.TRAILING);

		// Reverse lookup from a SwingConstants int; returns null when unmatched.
		public static Horizontal valueOf(int constant) {
			for (Horizontal h : Horizontal.values()) {
				if (h.constant == constant)
					return h;
			}
			return null;
		}

		int constant;

		Horizontal(int constant) {
			this.constant = constant;
		}
	}

	/**
	 * Vertical alignment/text-position options, mapping enum names to the
	 * corresponding SwingConstants int values.
	 */
	public enum Vertical {
		Top(SwingConstants.TOP), Center(SwingConstants.CENTER), Bottom(
				SwingConstants.BOTTOM);

		// Reverse lookup from a SwingConstants int; returns null when unmatched.
		public static Vertical valueOf(int constant) {
			for (Vertical v : Vertical.values()) {
				if (v.constant == constant)
					return v;
			}
			return null;
		}

		int constant;

		Vertical(int constant) {
			this.constant = constant;
		}
	}

	// Simple class name -> ButtonUI class; keys mirror buttonUIClassComboBox items.
	Map<String, Class> buttonUITypeMap = new HashMap<>();
	JComboBox<String> buttonUIClassComboBox = new JComboBox<>();
	FontComboBox fontComboBox;
	JLabel fontDescriptor = new JLabel("");
	JComboBox<String> iconComboBox = new JComboBox<>();
	JTextField text = new JTextField("Name");
	JCheckBox paintBorderCheckbox = new JCheckBox("Border", true);
	JCheckBox paintContentCheckbox = new JCheckBox("Content", true);
	JCheckBox paintFocusCheckbox = new JCheckBox("Focus", true);
	JComboBox<Horizontal> horizontalAlignmentComboBox = new JComboBox<>(
			Horizontal.values());
	JComboBox<Horizontal> horizontalTextPositionComboBox = new JComboBox<>(
			Horizontal.values());
	JComboBox<Vertical> verticalAlignmentComboBox = new JComboBox<>(
			Vertical.values());
	JComboBox<Vertical> verticalTextPositionComboBox = new JComboBox<>(
			Vertical.values());
	// Mac-only controls; aquaTypeComboBox stays null on other platforms
	// (refreshButton() checks for null before using it).
	JComboBox<String> aquaTypeComboBox;
	JComboBox<String> sizeVariantComboBox = new JComboBox<String>(new String[] {
			"regular", "small", "mini" });
	JComboBox<String> segmentPositionComboBox = new JComboBox<String>(
			new String[] { "only", "first", "middle", "last" });

	public JButtonDemo() {
		super(false, false, true);
		fontComboBox = new FontComboBox(new UIManagerFontFactory());
		JButton dummyButton = new JButton();
		List<Class> buttonUITypes = new ArrayList<>();
		// Probe every installed LookAndFeel for the ButtonUI it would install.
		LookAndFeelInfo[] lafs = UIManager.getInstalledLookAndFeels();
		for (LookAndFeelInfo lafInfo : lafs) {
			try {
				LookAndFeel laf = (LookAndFeel) Class.forName(
						lafInfo.getClassName()).newInstance();
				laf.initialize();
				UIDefaults defaults = laf.getDefaults();
				JButton testButton = new JButton("test");
				ButtonUI ui = (ButtonUI) defaults.getUI(testButton);
				try {
					testButton.setUI(ui);
					// only keep the UI if the call to setUI didn't throw an
					// exception:
					buttonUITypes.add(ui.getClass());
				} catch (Exception e) {
					// Nimbus throws an exception resembling:
					// @formatter:off
					// java.lang.ClassCastException: com.apple.laf.AquaLookAndFeel cannot be cast to javax.swing.plaf.nimbus.NimbusLookAndFeel
					// at javax.swing.plaf.nimbus.NimbusStyle.validate(NimbusStyle.java:250)
					// at javax.swing.plaf.nimbus.NimbusStyle.getValues(NimbusStyle.java:806)
					// at javax.swing.plaf.nimbus.NimbusStyle.getInsets(NimbusStyle.java:485)
					// at javax.swing.plaf.synth.SynthStyle.installDefaults(SynthStyle.java:913)
					// at javax.swing.plaf.synth.SynthLookAndFeel.updateStyle(SynthLookAndFeel.java:265)
					// at javax.swing.plaf.synth.SynthButtonUI.updateStyle(SynthButtonUI.java:79)
					// at javax.swing.plaf.synth.SynthButtonUI.installDefaults(SynthButtonUI.java:62)
					// at javax.swing.plaf.basic.BasicButtonUI.installUI(BasicButtonUI.java:88)
					// at javax.swing.JComponent.setUI(JComponent.java:666)
					// @formatter:on
				}
			} catch (Exception e) {
				e.printStackTrace();
			}
		}
		// Add this project's custom QButtonUI subclasses.
		buttonUITypes.add(BevelButtonUI.class);
		buttonUITypes.add(CapsuleButtonUI.class);
		buttonUITypes.add(GradientButtonUI.class);
		buttonUITypes.add(PlasticButtonUI.class);
		buttonUITypes.add(RecessedButtonUI.class);
		buttonUITypes.add(RetroButtonUI.class);
		buttonUITypes.add(RoundRectButtonUI.class);
		buttonUITypes.add(SquareButtonUI.class);
		buttonUITypes.add(TexturedButtonUI.class);
		buttonUITypes.add(VistaButtonUI.class);
		buttonUITypes.add(XPButtonUI.class);
		buttonUITypes.add(XPSubtleButtonUI.class);
		for (Class buttonUIType : buttonUITypes) {
			buttonUITypeMap.put(buttonUIType.getSimpleName(), buttonUIType);
			buttonUIClassComboBox.addItem(buttonUIType.getSimpleName());
		}
		// Default to whatever UI a plain JButton currently uses.
		buttonUIClassComboBox.setSelectedItem(dummyButton.getUI().getClass()
				.getSimpleName());

		// Shared listener: any control change rebuilds the preview button.
		ActionListener actionRefreshListener = new ActionListener() {

			@Override
			public void actionPerformed(ActionEvent e) {
				refreshButton();
			}

		};

		Inspector inspector = createConfigurationInspector(300);
		inspector.addRow(new JLabel("ButtonUI:"), buttonUIClassComboBox);
		inspector.addRow(new JLabel("Font:"), fontComboBox);
		inspector.addRow(new JLabel(""), fontDescriptor);
		inspector.addRow(new JLabel("Icon:"), iconComboBox);
		inspector.addRow(new JLabel("Text:"), text, true);
		inspector.addRow(new JLabel("Horizontal Alignment:"),
				horizontalAlignmentComboBox);
		inspector.addRow(new JLabel("Horizontal Text Position:"),
				horizontalTextPositionComboBox);
		inspector.addRow(new JLabel("Vertical Alignment:"),
				verticalAlignmentComboBox);
		inspector.addRow(new JLabel("Vertical Text Position:"),
				verticalTextPositionComboBox);
		inspector.addRow(new JLabel("Paint:"), paintBorderCheckbox,
				paintContentCheckbox, paintFocusCheckbox);

		// Seed alignment comboboxes from a default JButton's settings.
		horizontalAlignmentComboBox.setSelectedItem(Horizontal
				.valueOf(dummyButton.getHorizontalAlignment()));
		horizontalTextPositionComboBox.setSelectedItem(Horizontal
				.valueOf(dummyButton.getHorizontalTextPosition()));
		verticalAlignmentComboBox.setSelectedItem(Vertical.valueOf(dummyButton
				.getVerticalAlignment()));
		verticalTextPositionComboBox.setSelectedItem(Vertical
				.valueOf(dummyButton.getVerticalTextPosition()));

		if (JVM.isMac) {
			inspector.addSeparator();
			// String[] aquaTypes = getAquaTypes();
			// Hard-coded snapshot of getAquaTypes() so the list is stable
			// across JVMs; see getAquaTypes() for the reflective lookup.
			String[] aquaTypes = new String[] { NONE, "bevel", "capsule",
					"combobox", "comboboxEndCap", "comboboxInternal",
					"disclosure", "gradient", "help", "icon", "recessed",
					"round", "roundRect", "scrollColumnSizer", "segmented",
					"segmentedCapsule", "segmentedGradient",
					"segmentedRoundRect", "segmentedTextured",
					"segmentedTexturedRounded", "square", "text", "textured",
					"texturedRound", "toggle", "toolbar", "well" };
			aquaTypeComboBox = new JComboBox<String>(aquaTypes);
			inspector.addRow(new JLabel("Button Type:"), aquaTypeComboBox);
			inspector.addRow(new JLabel("Segment Position:"),
					segmentPositionComboBox);
			inspector.addRow(new JLabel("Size Variant:"), sizeVariantComboBox);
			aquaTypeComboBox.addActionListener(actionRefreshListener);
			sizeVariantComboBox.addActionListener(actionRefreshListener);
			segmentPositionComboBox.addActionListener(actionRefreshListener);
		}

		// Indices matter: refreshButton() checks getSelectedIndex() == 1 / 2.
		iconComboBox.addItem("None");
		iconComboBox.addItem("Thumbnail");
		iconComboBox.addItem("Refresh");

		buttonUIClassComboBox.addActionListener(actionRefreshListener);
		fontComboBox.addActionListener(actionRefreshListener);
		iconComboBox.addActionListener(actionRefreshListener);
		paintBorderCheckbox.addActionListener(actionRefreshListener);
		paintContentCheckbox.addActionListener(actionRefreshListener);
		paintFocusCheckbox.addActionListener(actionRefreshListener);
		horizontalAlignmentComboBox.addActionListener(actionRefreshListener);
		horizontalTextPositionComboBox.addActionListener(actionRefreshListener);
		verticalAlignmentComboBox.addActionListener(actionRefreshListener);
		verticalTextPositionComboBox.addActionListener(actionRefreshListener);
		text.getDocument().addDocumentListener(new DocumentListener() {

			@Override
			public void insertUpdate(DocumentEvent e) {
				refreshButton();
			}

			@Override
			public void removeUpdate(DocumentEvent e) {
				refreshButton();
			}

			@Override
			public void changedUpdate(DocumentEvent e) {
				refreshButton();
			}

		});

		// AbstractButton b = new JButton();
		// b.setIconTextGap(iconTextGap);
		// b.setMargin(m);

		paintBorderCheckbox
				.setToolTipText("This controls AbstractButton#setBorderPainted(boolean)");
		paintContentCheckbox
				.setToolTipText("This controls AbstractButton#setContentAreaFilled(boolean)");
		paintFocusCheckbox
				.setToolTipText("This controls AbstractButton#setFocusPainted(boolean)");

		refreshButton();
	}

	/**
	 * Return the different types of Aqua buttons we can render.
	 * <p>
	 * This method looks up a static map in com.apple.laf classes to identify
	 * its list of values.
	 */
	private static String[] getAquaTypes() {
		if (!JVM.isMac)
			return new String[] {};
		try {
			Class c = Class.forName("com.apple.laf.AquaButtonExtendedTypes");
			Map map = (Map) Reflection.invokeMethod(c, null, "getAllTypes");
			SortedSet<String> names = new TreeSet<>();
			names.addAll(map.keySet());
			return names.toArray(new String[names.size()]);
		} catch (Exception e) {
			e.printStackTrace();
		}
		return new String[] {};
	}

	// Previous preview button; used to carry the selected state across rebuilds.
	private AbstractButton lastButton;

	/**
	 * Rebuilds the preview button from the current inspector settings and
	 * installs it in examplePanel. Any failure is reported in the panel
	 * instead of the button.
	 */
	private void refreshButton() {
		try {
			Font font = fontComboBox.getSelectedFont();
			fontDescriptor.setText(" " + font.getName() + " "
					+ font.getSize2D());
			AbstractButton button = new JButton();
			String buttonUIClass = (String) buttonUIClassComboBox
					.getSelectedItem();
			ButtonUI buttonUI = (ButtonUI) buttonUITypeMap.get(buttonUIClass)
					.newInstance();
			button.setUI(buttonUI);
			if (lastButton != null)
				button.setSelected(lastButton.isSelected());
			button.setFont(font);
			button.setText(text.getText());
			lastButton = button;
			if (iconComboBox.getSelectedIndex() == 1) {
				button.setIcon(new ImageIcon(getThumbnail()));
			} else if (iconComboBox.getSelectedIndex() == 2) {
				button.setIcon(new RefreshIcon(30));
			}
			Horizontal hAlign = (Horizontal) horizontalAlignmentComboBox
					.getSelectedItem();
			button.setHorizontalAlignment(hAlign.constant);
			Horizontal hTextPos = (Horizontal) horizontalTextPositionComboBox
					.getSelectedItem();
			button.setHorizontalTextPosition(hTextPos.constant);
			Vertical vAlign = (Vertical) verticalAlignmentComboBox
					.getSelectedItem();
			button.setVerticalAlignment(vAlign.constant);
			Vertical vTextPos = (Vertical) verticalTextPositionComboBox
					.getSelectedItem();
			button.setVerticalTextPosition(vTextPos.constant);
			button.setBorderPainted(paintBorderCheckbox.isSelected());
			button.setContentAreaFilled(paintContentCheckbox.isSelected());
			button.setFocusPainted(paintFocusCheckbox.isSelected());
			// aquaTypeComboBox is only non-null on Mac (see constructor).
			if (aquaTypeComboBox != null) {
				String buttonType = (String) aquaTypeComboBox.getSelectedItem();
				if (!NONE.equalsIgnoreCase(buttonType)) {
					button.putClientProperty("JButton.buttonType", buttonType);
				}
				if (buttonType.startsWith("segmented")) {
					segmentPositionComboBox.setEnabled(true);
					button.putClientProperty("JButton.segmentPosition",
							(String) segmentPositionComboBox.getSelectedItem());
				} else {
					segmentPositionComboBox.setEnabled(false);
				}
				button.putClientProperty("JComponent.sizeVariant",
						(String) sizeVariantComboBox.getSelectedItem());
			}
			examplePanel.removeAll();
			examplePanel.add(button);
		} catch (Exception e) {
			e.printStackTrace();
			examplePanel.removeAll();
			examplePanel.add(new JLabel("Error: see console"));
		} finally {
			examplePanel.revalidate();
		}
	}

	// Lazily-loaded, cached 30x30 thumbnail used by the "Thumbnail" icon option.
	static BufferedImage thumbnail = null;

	private static BufferedImage getThumbnail() {
		if (thumbnail == null) {
			BufferedImage bi = ImageLoader.createImage(AlphaCompositeDemo.class
					.getResource("balloon.png"));
			thumbnail = Scaling.scaleProportionally(bi, new Dimension(30, 30));
		}
		return thumbnail;
	}

	@Override
	public String getTitle() {
		return "JButton Demo";
	}

	@Override
	public String getSummary() {
		return "This demonstrates basic button configurations in Swing.";
	}

	@Override
	public URL getHelpURL() {
		// TODO Auto-generated method stub
		return null;
	}

	@Override
	public String[] getKeywords() {
		return new String[] { "button", "ux", "ui", "Swing" };
	}

	@Override
	public Class<?>[] getClasses() {
		return new Class[] { JButton.class, QButtonUI.class };
	}
}
package be.belgif.vocab.ldf; import be.belgif.vocab.App; import be.belgif.vocab.helpers.QueryHelper; import java.net.URI; import javax.ws.rs.WebApplicationException; import javax.ws.rs.core.UriBuilder; import org.eclipse.rdf4j.model.BNode; import org.eclipse.rdf4j.model.IRI; import org.eclipse.rdf4j.model.Model; import org.eclipse.rdf4j.model.Value; import org.eclipse.rdf4j.model.ValueFactory; import org.eclipse.rdf4j.model.impl.LinkedHashModel; import org.eclipse.rdf4j.model.impl.SimpleValueFactory; import org.eclipse.rdf4j.model.vocabulary.FOAF; import org.eclipse.rdf4j.model.vocabulary.RDF; import org.eclipse.rdf4j.model.vocabulary.VOID; import org.eclipse.rdf4j.model.vocabulary.XMLSchema; import org.eclipse.rdf4j.query.BindingSet; import org.eclipse.rdf4j.query.GraphQuery; import org.eclipse.rdf4j.query.MalformedQueryException; import org.eclipse.rdf4j.query.QueryEvaluationException; import org.eclipse.rdf4j.query.QueryResults; import org.eclipse.rdf4j.query.TupleQuery; import org.eclipse.rdf4j.repository.Repository; import org.eclipse.rdf4j.repository.RepositoryConnection; import org.eclipse.rdf4j.repository.RepositoryException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Helper class for querying the triple store using Linked Data Fragments. 
* * @author Bart.Hanssens */ public class QueryHelperLDF { private final static Logger LOG = (Logger) LoggerFactory.getLogger(QueryHelperLDF.class); public final static String LDF = "_ldf"; private final static ValueFactory F = SimpleValueFactory.getInstance(); private final static String PREFIX = App.getPrefix(); private final static BNode LDF_SEARCH = F.createBNode("search"); private final static Value S = F.createLiteral("s"); private final static Value P = F.createLiteral("p"); private final static Value O = F.createLiteral("o"); private final static BNode LDF_MAP_S = F.createBNode("s"); private final static BNode LDF_MAP_P = F.createBNode("p"); private final static BNode LDF_MAP_O = F.createBNode("o"); private final static String PAGE = "page"; private final static int PAGING = 50; private final static Value PAGING_VAL = F.createLiteral("50", XMLSchema.INTEGER); private final static String Q_COUNT = "SELECT (COUNT(*) AS ?cnt) " + "WHERE { ?s ?p ?o } "; private final static String Q_COUNT_GRAPH = "SELECT (COUNT(*) AS ?cnt) " + "WHERE { GRAPH ?graph { ?s ?p ?o } } "; private final static String Q_LDF = "CONSTRUCT { ?s ?p ?o } " + "WHERE { ?s ?p ?o } " + //"ORDER BY ?s ?p ?o " + "LIMIT " + PAGING; private final static String Q_LDF_GRAPH = "CONSTRUCT { ?s ?p ?o } " + "WHERE { GRAPH ?graph { ?s ?p ?o } } " + //"ORDER BY ?s ?p ?o " + "LIMIT " + PAGING; /** * Convert string into IRI or null * @param s string * @return IRI or null */ private static IRI createIRI(String s) { // Variable if (s.equals("") || (s.startsWith("?") && s.length() > 1)) { return null; } // IRI return F.createIRI(s); } /** * Convert string into literal or URI * * @param s object * @return value (literal or URI) */ private static Value createLiteralOrUri(String s) { if (s.startsWith("\"")) { // test for simple literal if (s.endsWith("\"")) { return F.createLiteral(s.substring(1, s.length() - 2)); } // test for language tag int l = s.lastIndexOf("\"@"); if (l > 0) { return 
F.createLiteral(s.substring(1, l), s.substring(l + 2)); } // test for data type int t = s.lastIndexOf("\"^^"); if (l > 0) { return F.createLiteral(s.substring(1, l), s.substring(l + 3)); } // malformed LOG.warn("Malformed object value"); return null; } return createIRI(s); } /** * Hydra counters and other metadata * * @param m triples * @param graph hydra named graph * @param part page or fragment IRI * @param count total number of results * @return RDF triples */ private static void meta(Model m, IRI graph, IRI part, int count) { m.add(graph, FOAF.PRIMARY_TOPIC, part, graph); Value total = F.createLiteral(String.valueOf(count), XMLSchema.INTEGER); m.add(part, RDF.TYPE, Hydra.PARTIAL, graph); m.add(part, Hydra.ITEMS, PAGING_VAL, graph); // as per spec two properties with same value m.add(part, VOID.TRIPLES, total, graph); m.add(part, Hydra.TOTAL, total, graph); } /** * Hydra search template mappings * * @param m triples * @param graph hydra graph * @param vocab name of the vocabulary * @param dataset dataset IRI */ private static void template(Model m, IRI graph, String vocab, IRI dataset) { String path = vocab.isEmpty() ? 
"" : "/" + vocab; // search template m.add(dataset, Hydra.SEARCH, LDF_SEARCH, graph); m.add(LDF_SEARCH, Hydra.TEMPLATE, F.createLiteral(PREFIX + LDF + path + "{?s,p,o}"), graph); m.add(LDF_SEARCH, Hydra.MAPPING, LDF_MAP_S, graph); m.add(LDF_SEARCH, Hydra.MAPPING, LDF_MAP_P, graph); m.add(LDF_SEARCH, Hydra.MAPPING, LDF_MAP_O, graph); // generic mapping m.add(LDF_MAP_S, Hydra.VARIABLE, S, graph); m.add(LDF_MAP_S, Hydra.PROPERTY, RDF.SUBJECT, graph); m.add(LDF_MAP_P, Hydra.VARIABLE, P, graph); m.add(LDF_MAP_P, Hydra.PROPERTY, RDF.PREDICATE, graph); m.add(LDF_MAP_O, Hydra.VARIABLE, O, graph); m.add(LDF_MAP_O, Hydra.PROPERTY, RDF.OBJECT, graph); } /** * Hydra pagination * * @param m triples * @param graph hydra named graph * @param part page or fragment * @param builder URI Builder * @param current current page number * @param count total number of results * @param offset offset */ private static void page(Model m, IRI graph, IRI part, int current, int count, int offset, UriBuilder builder) { // pagination, page count starts at 1 if (offset >= PAGING) { URI prevPage = builder.build(current - 1, PAGE); m.add(part, Hydra.PREVIOUS, F.createIRI(prevPage.toString()), graph); } if (offset + PAGING < count) { URI nextPage = builder.build(current + 1, PAGE); m.add(part, Hydra.NEXT, F.createIRI(nextPage.toString()), graph); } } /** * Hydra hypermedia controls * * @param m triples * @param vocab vocabulary name * @param dataset dataset IRI * @param builder URI Builder * @param offset offset * @param count total number of triples * @param isFrag true if fragment was requested (true if page) */ private static void hyperControls(Model m, String vocab, IRI dataset, UriBuilder builder, int offset, int count, boolean isFrag) { IRI graph = QueryHelper.asGraph("/" + vocab + "#hydra"); IRI fragment = F.createIRI(builder.build().toString()); builder.queryParam(PAGE, "{page}"); int current = (offset / PAGING) + 1; IRI page = F.createIRI(builder.build(current, PAGE).toString()); 
m.add(dataset, RDF.TYPE, VOID.DATASET, graph); m.add(dataset, RDF.TYPE, Hydra.COLLECTION, graph); m.add(dataset, VOID.SUBSET, fragment, graph); m.add(fragment, VOID.SUBSET, page, graph); template(m, graph, vocab, dataset); meta(m, graph, isFrag ? fragment : page, count); page(m, graph, isFrag ? fragment : page, current, count, offset, builder); } /** * Get fragment / one page of results * * @param m triples * @param conn repository * @param subj subject IRI * @param pred predicate IRI * @param obj object value * @param graph named graph * @param offset */ private static void getFragment(Model m, RepositoryConnection conn, IRI subj, IRI pred, Value obj, IRI graph, long offset, long count) { // nothing (more) to show if ((count <= 0) || (offset >= count)) { return; } String qry = (graph != null) ? Q_LDF_GRAPH : Q_LDF; GraphQuery gq = conn.prepareGraphQuery(qry + " OFFSET " + offset); if (subj != null) { gq.setBinding("s", subj); } if (pred != null) { gq.setBinding("p", pred); } if (obj != null) { gq.setBinding("o", obj); } if (graph != null) { gq.setBinding("graph", graph); } m.addAll(QueryResults.asModel(gq.evaluate())); } /** * Count number of results * * @param conn repository * @param subj subject IRI * @param pred predicate IRI * @param obj object value * @param graph named graph * @return number of results */ private static int getCount(RepositoryConnection conn, IRI subj, IRI pred, Value obj, IRI graph) { TupleQuery tq = conn.prepareTupleQuery((graph != null) ? 
Q_COUNT_GRAPH : Q_COUNT); if (subj != null) { tq.setBinding("s", subj); } if (pred != null) { tq.setBinding("p", pred); } if (obj != null) { tq.setBinding("o", obj); } if (graph != null) { tq.setBinding("graph", graph); } BindingSet res = QueryResults.singleResult(tq.evaluate()); String val = res.getValue("cnt").stringValue(); return Integer.valueOf(val); } /** * Set namespaces * * @param m * @return */ private static Model setNamespaces(Model m) { Model ns = QueryHelper.setNamespaces(m); ns.setNamespace(Hydra.PREFIX, Hydra.NAMESPACE); return ns; } /** * Get linked data fragment * * @param repo RDF store * @param s subject to search for or null * @param p predicate to search for or null * @param o object to search for or null * @param vocab named graph * @param page page number * @return RDF model */ public static Model getLDF(Repository repo, String s, String p, String o, String vocab, String page) { boolean isFrag = (page == null || page.isEmpty()); // check parameters int pageVal = isFrag ? 1 : Integer.valueOf(page); if (pageVal < 1) { throw new WebApplicationException("Invalid (zero or negative) page number"); } IRI subj = (s != null) ? createIRI(s) : null; IRI pred = (p != null) ? createIRI(p) : null; Value obj = (o != null) ? createLiteralOrUri(o) : null; UriBuilder builder = UriBuilder.fromUri(PREFIX).path(LDF).path(vocab); if (s != null) { builder = builder.queryParam("s", s); } if (p != null) { builder = builder.queryParam("p", p); } if (o != null) { builder = builder.queryParam("o", o); } int offset = (pageVal - 1) * PAGING; // speedup: vocabularies are stored in separate graphs IRI graph = (!vocab.isEmpty()) ? 
QueryHelper.asGraph(vocab) : null; IRI dataset = QueryHelper.asDataset(vocab); try (RepositoryConnection conn = repo.getConnection()) { int count = getCount(conn, subj, pred, obj, graph); Model m = new LinkedHashModel(); hyperControls(m, vocab, dataset, builder, offset, count, isFrag); getFragment(m, conn, subj, pred, obj, graph, offset, count); return setNamespaces(m); } catch (RepositoryException|MalformedQueryException|QueryEvaluationException e) { throw new WebApplicationException(e); } } }
/* * This class builds the demo data. */ package com.validation.manager.core; import com.validation.manager.core.db.Project; import com.validation.manager.core.db.Requirement; import com.validation.manager.core.db.controller.ProjectJpaController; import com.validation.manager.core.db.controller.RequirementJpaController; import com.validation.manager.core.db.controller.exceptions.NonexistentEntityException; import com.validation.manager.core.server.core.ProjectServer; import com.validation.manager.core.server.core.RequirementServer; import com.validation.manager.core.server.core.RequirementSpecNodeServer; import com.validation.manager.core.server.core.RequirementSpecServer; import com.validation.manager.core.server.core.TestCaseServer; import com.validation.manager.core.server.core.TestPlanServer; import com.validation.manager.core.server.core.TestProjectServer; import java.util.ArrayList; import java.util.Date; import java.util.List; import java.util.logging.Logger; import org.openide.util.Exceptions; /** * * @author Javier Ortiz Bultron <javier.ortiz.78@gmail.com> */ public class DemoBuilder { private static final Logger LOG = Logger.getLogger(DemoBuilder.class.getSimpleName()); public static void buildDemoProject() throws Exception { LOG.info("Creating demo projects..."); ProjectJpaController controller = new ProjectJpaController(DataBaseManager .getEntityManagerFactory()); Project rootProject = new Project("Demo"); controller.create(rootProject); for (int i = 0; i < 5; i++) { Project temp = new Project("Sub " + (i + 1)); controller.create(temp); addDemoProjectRequirements(temp); addDemoProjectTestProject(temp); rootProject.getProjectList().add(temp); } addDemoProjectRequirements(rootProject); controller.edit(rootProject); LOG.info("Done!"); } private static void addDemoProjectRequirements(Project p) throws Exception { for (int i = 0; i < 5; i++) { //Create a spec RequirementSpecServer temp = new RequirementSpecServer("Spec " + i, "Description " + i, p.getId(), 
1); temp.write2DB(); RequirementSpecNodeServer node = temp.addSpecNode("Node " + i, "Description " + i, "Scope " + i); for (int y = 0; y < 5; y++) { RequirementServer req = new RequirementServer("Requirement " + (y + 1) * (i + 1), "Description " + (y + 1) * (i + 1), node.getRequirementSpecNodePK(), "Notes", 1, 1); req.write2DB(); node.getRequirementList().add(req.getEntity()); } node.write2DB(); p.getRequirementSpecList().add(temp.getEntity()); } new ProjectJpaController(DataBaseManager .getEntityManagerFactory()).edit(p); } private static void addDemoProjectTestProject(Project p) throws NonexistentEntityException, Exception { TestProjectServer tp = new TestProjectServer("Test Project", true); tp.setName("Test Project"); tp.setNotes("Notes"); tp.setActive(true); tp.write2DB(); //Add the test structur TestPlanServer tps = new TestPlanServer(tp.getEntity(), true, true); tps.setName("Test Plan tps.setNotes("Notes"); tps.write2DB(); for (int i = 0; i < 5; i++) { //Add steps TestCaseServer tcs = new TestCaseServer("Test Case new Date()); tcs.write2DB(); for (int j = 0; j < 5; j++) { List<Requirement> requirements = new RequirementJpaController(DataBaseManager .getEntityManagerFactory()) .findRequirementEntities().subList(j * 5, j * 5 + 5); tcs.addStep((j + 1), "Step #" + (j + 1), "Note", "Criteria", requirements); } tcs.write2DB(); tps.addTestCase(tcs.getEntity()); } ProjectServer ps = new ProjectServer(p); ps.setTestProjectList(new ArrayList<>()); ps.getTestProjectList().add(tp.getEntity()); //Save it ps.write2DB(); } public static void main(String[] args) { try { DataBaseManager.setPersistenceUnitName("TestVMPU"); buildDemoProject(); } catch (Exception ex) { Exceptions.printStackTrace(ex); } } }
package cat.nyaa.autobloodmoon.arena;

import cat.nyaa.autobloodmoon.AutoBloodmoon;
import cat.nyaa.autobloodmoon.I18n;
import cat.nyaa.autobloodmoon.api.InfernalMobsAPI;
import cat.nyaa.autobloodmoon.events.MobListener;
import cat.nyaa.autobloodmoon.level.Level;
import cat.nyaa.autobloodmoon.mobs.Mob;
import cat.nyaa.autobloodmoon.stats.PlayerStats;
import cat.nyaa.autobloodmoon.utils.GetCircle;
import cat.nyaa.autobloodmoon.utils.RandomLocation;
import cat.nyaa.utils.ISerializable;
import org.bukkit.Bukkit;
import org.bukkit.Location;
import org.bukkit.OfflinePlayer;
import org.bukkit.Particle;
import org.bukkit.block.Block;
import org.bukkit.entity.EntityType;
import org.bukkit.entity.LivingEntity;
import org.bukkit.entity.Player;
import org.bukkit.scheduler.BukkitRunnable;
import org.bukkit.util.Vector;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.Random;
import java.util.UUID;

/**
 * A "bloodmoon" arena: a circular region in which waves of normal and
 * infernal mobs are spawned for the joined players. The instance is also a
 * repeating Bukkit task ({@link #run()} fires every server tick once
 * {@link #init} has been called) driving the wait/play state machine.
 */
public class Arena extends BukkitRunnable implements ISerializable {
    // ---- runtime (non-persisted) game state ----
    public ArrayList<UUID> players = new ArrayList<>();
    public Level level;
    public int currentLevel = 0;
    public String kitName;
    // Countdown in ticks until the next wave / game start.
    public int nextWave = 0;
    public int lastSpawn = 0;
    public ArrayList<UUID> infernalMobs = new ArrayList<>();
    public ArrayList<UUID> normalMobs = new ArrayList<>();
    public ArrayList<UUID> entityList = new ArrayList<>();
    public ArenaState state;
    public HashMap<UUID, PlayerStats> playerStats = new HashMap<>();
    // ---- persisted arena definition ----
    @Serializable
    private String name;
    @Serializable
    private String world;
    @Serializable
    private int radius;
    @Serializable
    private int spawnRadius;
    @Serializable
    private double x;
    @Serializable
    private double y;
    @Serializable
    private double z;
    private AutoBloodmoon plugin;
    private int time = 0;          // ticks since init
    private int infernal;          // counter toward the next infernal spawn
    private int ticks = 0;         // sub-second tick counter (reset at 20)
    private long sendBorderParticle = 0; // last border-particle emission (ms)

    public String getWorld() {
        return world;
    }

    public void setWorld(String world) {
        this.world = world;
    }

    /** @return the arena center as a Bukkit {@link Location}. */
    public Location getCenterPoint() {
        return new Location(Bukkit.getWorld(getWorld()), x, y, z);
    }

    public void setCenterPoint(Location loc) {
        x = loc.getX();
        y = loc.getY();
        z = loc.getZ();
        world = loc.getWorld().getName();
    }

    public int getRadius() {
        return radius;
    }

    public void setRadius(int radius) {
        this.radius = radius;
    }

    public int getSpawnRadius() {
        return spawnRadius;
    }

    public void setSpawnRadius(int spawnRadius) {
        this.spawnRadius = spawnRadius;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    /**
     * Start a new game in WAIT state and schedule this task to run every
     * tick. Announces the game to all online players.
     */
    public void init(AutoBloodmoon plugin, Level level, String kitName) {
        this.plugin = plugin;
        this.level = level;
        this.kitName = kitName;
        state = ArenaState.WAIT;
        nextWave = plugin.cfg.call_timeout;
        this.runTaskTimer(this.plugin, 20, 1);
        broadcast(I18n._("user.game.new_game_0"));
        broadcast(I18n._("user.game.new_game_1", level.getLevelType().toString(),
                level.getMaxInfernalLevel(), level.getMinPlayerAmount()));
        broadcast(I18n._("user.game.new_game_2"));
    }

    /** Add a player to the game (no-op if already joined). */
    public void join(Player player) {
        if (!players.contains(player.getUniqueId())) {
            players.add(player.getUniqueId());
            PlayerStats stats = getPlayerStats(player);
            stats.incrementStats(PlayerStats.StatsType.JOINED);
            playerStats.put(player.getUniqueId(), stats);
            if (players.size() >= level.getMinPlayerAmount()) {
                broadcast(I18n._("user.game.join", player.getName(),
                        players.size(), players.size() + 1));
            } else {
                broadcast(I18n._("user.game.join", player.getName(),
                        players.size(), level.getMinPlayerAmount()));
            }
            // Late joiners are teleported straight into a running game.
            if (state == ArenaState.PLAYING) {
                plugin.teleportUtil.Teleport(player, getCenterPoint());
            }
        }
    }

    /**
     * Remove a player from the game.
     *
     * @return true if the player was in the game
     */
    public boolean quit(Player player) {
        if (players.contains(player.getUniqueId())) {
            players.remove(player.getUniqueId());
            broadcast(I18n._("user.game.quit", player.getName()));
            broadcast(I18n._("user.game.players_remaining", players.size()));
            return true;
        } else {
            return false;
        }
    }

    /** Switch to PLAYING, lock world time to night and gather the players. */
    public void start() {
        state = ArenaState.PLAYING;
        lockTime();
        for (Player p : Bukkit.getOnlinePlayers()) {
            if (players.contains(p.getUniqueId())) {
                plugin.teleportUtil.Teleport(p, getCenterPoint());
            }
        }
        nextWave = 0;
        currentLevel = 0;
    }

    /** Stop the game, despawn arena mobs and flush accumulated stats. */
    public void stop() {
        state = ArenaState.STOP;
        this.cancel();
        removeAllMobs();
        for (UUID k : playerStats.keySet()) {
            plugin.statsManager.getPlayerStats(k).add(playerStats.get(k));
        }
        plugin.cfg.statsConfig.save();
        plugin.currentArena = null;
    }

    /** Per-tick state machine: WAIT countdown, then wave/spawn management. */
    @Override
    public void run() {
        time++;
        ticks++;
        sendBorderParticle();
        if (state == ArenaState.WAIT) {
            // NOTE(review): the decrement was garbled in the original source
            // (bare "nextWave"); the surrounding countdown logic requires it.
            nextWave--;
            if (nextWave <= 0) {
                if (players.size() >= level.getMinPlayerAmount()) {
                    this.start();
                } else {
                    broadcast(I18n._("user.game.cancel"));
                    this.stop();
                }
            } else if (nextWave <= 300) {
                // Last 15 seconds: announce once per second.
                if (players.size() >= level.getMinPlayerAmount()) {
                    if (ticks >= 20) {
                        broadcast(I18n._("user.game.start", nextWave / 20));
                        ticks = 0;
                        return;
                    }
                } else {
                    broadcast(I18n._("user.game.cancel"));
                    this.stop();
                }
            }
        } else if (state == ArenaState.PLAYING) {
            if (nextWave <= 0) {
                // Keep the normal-mob population topped up for this wave.
                if (currentLevel > 0
                        && time - lastSpawn >= level.getMobSpawnDelayTicks()
                        && this.normalMobs.size() < players.size() * level.getMobAmount()) {
                    spawnMob();
                    lastSpawn = time;
                    return;
                }
                // Wave cleared (or first wave): schedule the next one.
                if (currentLevel == 0 || (infernalMobs.isEmpty()
                        && normalMobs.size() >= players.size() * level.getMobAmount()
                        && currentLevel < level.getMaxInfernalLevel())) {
                    nextWave = plugin.cfg.preparation_time;
                    normalMobs.clear();
                    currentLevel++;
                    broadcast(I18n._("user.game.next_wave", nextWave / 20));
                    broadcast(I18n._("user.game.level", currentLevel));
                    return;
                }
                // Final wave cleared: the players win.
                if (infernalMobs.isEmpty()
                        && currentLevel >= level.getMaxInfernalLevel()
                        && !players.isEmpty()
                        && normalMobs.size() >= players.size() * level.getMobAmount()) {
                    broadcast(I18n._("user.game.win"));
                    for (UUID k : players) {
                        playerStats.get(k).incrementStats(PlayerStats.StatsType.WINING);
                    }
                    stop();
                    return;
                }
            }
            if (ticks >= 20) {
                // Once per second: keep it night and track infernal mobs.
                ticks = 0;
                lockTime();
                if (!infernalMobs.isEmpty()) {
                    ArrayList<UUID> tmp = new ArrayList<>();
                    for (LivingEntity entity : getCenterPoint().getWorld().getLivingEntities()) {
                        if (!entity.isDead()
                                && infernalMobs.contains(entity.getUniqueId())
                                && InfernalMobsAPI.isInfernalMob(entity)) {
                            // Compare horizontal distance only (same Y).
                            Location location = getCenterPoint().clone();
                            location.setY(entity.getLocation().getY());
                            if (location.distance(entity.getLocation()) > getRadius()) {
                                // Escaped the arena: pull it back inside.
                                Location loc = getRandomLocation();
                                if (loc != null) {
                                    entity.teleport(loc);
                                }
                            }
                            tmp.add(entity.getUniqueId());
                        }
                    }
                    if (tmp.size() != infernalMobs.size()) {
                        infernalMobs = tmp;
                        broadcast(I18n._("user.game.mobs_remaining", infernalMobs.size()));
                    }
                }
            }
            // NOTE(review): same garbled decrement as above; reconstructed.
            nextWave--;
            if (players.isEmpty()) {
                broadcast(I18n._("user.game.fail"));
                stop();
            }
        } else {
            // STOP or unexpected state: stop ticking.
            cancel();
        }
    }

    /** @return a random location within the arena, or null if none found. */
    public Location getRandomLocation() {
        return RandomLocation.RandomLocation(getCenterPoint(), getSpawnRadius(),
                getRadius());
    }

    /**
     * Spawn one mob. Every {@code level.getInfernalAmount()}-th call spawns
     * an infernal mob for the current level; the rest spawn a random normal
     * mob from configuration.
     */
    public void spawnMob() {
        infernal++;
        if (infernal == level.getInfernalAmount()) {
            Mob mob = plugin.mobManager.getRandomMob(currentLevel);
            Location loc = getRandomLocation();
            if (mob != null && loc != null) {
                // The listener picks up the spawn at this location and tags it.
                plugin.mobListener.spawnLocation = loc;
                plugin.mobListener.mobType = MobListener.MobType.INFERNAL;
                if (InfernalMobsAPI.spawnMob(mob.getMobType(), mob.getSkills(), loc)) {
                    plugin.mobListener.spawnLocation = null;
                }
            }
            infernal = 0;
        } else {
            Location loc = getRandomLocation();
            if (loc != null) {
                String mob = plugin.cfg.mobConfig.normalMob.get(new Random().nextInt(
                        plugin.cfg.mobConfig.normalMob.size()));
                plugin.mobListener.spawnLocation = loc;
                plugin.mobListener.mobType = MobListener.MobType.NORMAL;
                loc.getWorld().spawnEntity(loc, EntityType.valueOf(mob.toUpperCase()));
            }
        }
    }

    /** Broadcast a prefixed message to every online player on the server. */
    public void broadcast(String s) {
        for (Player p : plugin.getServer().getOnlinePlayers()) {
            p.sendMessage(I18n._("user.prefix") + s);
        }
    }

    /** Copy only the persisted definition (name, center, radii). */
    public Arena clone() {
        Arena arena = new Arena();
        arena.setName(getName());
        arena.setCenterPoint(getCenterPoint());
        arena.setRadius(getRadius());
        arena.setSpawnRadius(getSpawnRadius());
        return arena;
    }

    /** Pin the arena world's time to midnight (18000). */
    public void lockTime() {
        if (getCenterPoint().getWorld().getTime() != 18000) {
            getCenterPoint().getWorld().setTime(18000);
        }
    }

    /** Remove every entity this arena spawned. */
    public void removeAllMobs() {
        for (LivingEntity entity : getCenterPoint().getWorld().getLivingEntities()) {
            if (entityList.contains(entity.getUniqueId())) {
                entity.remove();
            }
        }
    }

    /**
     * Emit barrier particles along the arena border, at most once every
     * four seconds.
     */
    public void sendBorderParticle() {
        if (plugin.cfg.border_particle
                && System.currentTimeMillis() - sendBorderParticle >= 4000) {
            sendBorderParticle = System.currentTimeMillis();
            for (Block block : GetCircle.getCylinder(getCenterPoint(),
                    getCenterPoint().getWorld(), getRadius(), getRadius(),
                    plugin.cfg.border_particle_height, false)) {
                block.getWorld().spawnParticle(Particle.BARRIER,
                        block.getLocation().add(new Vector(0.5D, 0.5D, 0.5D)), 1);
            }
        }
    }

    /** Get (creating if absent) the per-game stats for a player. */
    public PlayerStats getPlayerStats(OfflinePlayer player) {
        if (!playerStats.containsKey(player.getUniqueId())) {
            playerStats.put(player.getUniqueId(), new PlayerStats(player));
        }
        return playerStats.get(player.getUniqueId());
    }

    public enum ArenaState {
        WAIT, PLAYING, STOP
    }
}
package com.scorpiac.javarant; import com.google.gson.JsonObject; import com.google.gson.JsonParser; import com.google.gson.stream.JsonReader; import com.scorpiac.javarant.exceptions.AuthenticationException; import com.scorpiac.javarant.exceptions.NoSuchRantException; import com.scorpiac.javarant.exceptions.NoSuchUserException; import org.apache.http.NameValuePair; import org.apache.http.client.fluent.Request; import org.apache.http.message.BasicNameValuePair; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.util.ArrayList; import java.util.Arrays; import java.util.List; public class DevRant { static final String APP_ID = "3"; static final String PLAT_ID = "3"; static final String BASE_URL = "https: static final String AVATARS_URL = "https://avatars.devrant.io"; static final String USER_URL = "/users"; static final String RANT_URL = "/rants"; static final String COLLAB_URL = "/collabs"; // API endpoints. static final String API = "/api"; static final String API_DEVRANT = API + "/devrant"; static final String API_RANTS = API_DEVRANT + "/rants"; static final String API_SEARCH = API_DEVRANT + "/search"; static final String API_SURPRISE = API_RANTS + "/surprise"; static final String API_USERS = API + "/users"; static final String API_USER_ID = API + "/get-user-id"; static final String API_WEEKLY = API_DEVRANT + "/weekly-rants"; static final String API_COLLABS = API_DEVRANT + "/collabs"; static final String API_STORIES = API_DEVRANT + "/story-rants"; static final String API_AUTH_TOKEN = API_USERS + "/auth-token"; static final String API_COMMENT = "/comments"; static final String API_VOTE = "/vote"; static final String API_NOTIFS = API_USERS + "/me/notif-feed"; private Auth auth; /** * Log in to devRant. * * @param username The username. * @param password The password. * @throws AuthenticationException If the login data is invalid. 
*/ public void login(String username, char[] password) throws AuthenticationException { if (auth != null) throw new IllegalStateException("A user is already logged in."); JsonObject json = post(API_AUTH_TOKEN, new BasicNameValuePair("username", username), new BasicNameValuePair("password", String.valueOf(password)) ); // Clear the password. for (int i = 0; i < password.length; i++) password[i] = 0; if (!Util.jsonSuccess(json)) throw new AuthenticationException(); auth = Auth.fromJson(json); } /** * Log out of devRant. */ public void logout() { auth = null; } /** * Check whether a user is logged in. * * @return {@code true} if a user is logged in. */ public boolean isLoggedIn() { return auth != null; } /** * Get a list of rants. * * @param sort The sorting method. * @param limit How many rants to get. * @param skip How many rants to skip. * @return An array of rants. */ public Rant[] rants(Sort sort, int limit, int skip) { // Rants url, app id, sort, skip, limit. String url = String.format("%1$s?app=%2$s&sort=%3$s&skip=%4$d&limit=%5$d", API_RANTS, APP_ID, sort.toString(), skip, limit); JsonObject json = get(url); // Check for success. if (!Util.jsonSuccess(json)) return null; return Util.jsonToList(json.get("rants").getAsJsonArray(), elem -> Rant.fromJson(this, elem.getAsJsonObject())).toArray(new Rant[0]); } /** * Search for rants matching a certain term. * * @param term The term to search for. * @return An array of rants matching the search term. */ public Rant[] search(String term) { // Search url, app id, term. String url = String.format("%1$s?app=%2$s&term=%3$s", API_SEARCH, APP_ID, term); JsonObject json = get(url); // Check for success. if (!Util.jsonSuccess(json)) return null; return Util.jsonToList(json.get("results").getAsJsonArray(), elem -> Rant.fromJson(this, elem.getAsJsonObject())).toArray(new Rant[0]); } /** * Get a random rant with at least 15 +1's. * * @return A random rant. */ public Rant surprise() { // Surprise url, app id. 
String url = String.format("%1$s?app=%2$s", API_SURPRISE, APP_ID); JsonObject json = get(url); // Check for success. if (!Util.jsonSuccess(json)) return null; return Rant.fromJson(this, json.get("rant").getAsJsonObject()); } /** * Get the weekly rants. * * @return The weekly rants. */ public Rant[] weekly() { // Weekly url, app id. String url = String.format("%1$s?app=%2$s", API_WEEKLY, APP_ID); JsonObject json = get(url); // Check for success. if (!Util.jsonSuccess(json)) return null; return Util.jsonToList(json.get("rants").getAsJsonArray(), elem -> Rant.fromJson(this, elem.getAsJsonObject())).toArray(new Rant[0]); } /** * Get the collab rants. * * @return The collab rants. */ public Collab[] collabs() { // Collab url, app id. String url = String.format("%1$s?app=%2$s&", API_COLLABS, APP_ID); JsonObject json = get(url); // Check for success. if (!Util.jsonSuccess(json)) return null; return Util.jsonToList(json.get("rants").getAsJsonArray(), elem -> Collab.fromJson(this, elem.getAsJsonObject())).toArray(new Collab[0]); } /** * Get a rant by its id. * * @param id The id of the rant to get. * @return The rant. */ public Rant getRant(int id) { // Rants url, rant id, app id. String url = String.format("%1$s/%2$d?app=%3$s", DevRant.API_RANTS, id, DevRant.APP_ID); JsonObject json = get(url); // Check if the rant exists. if (!Util.jsonSuccess(json)) throw new NoSuchRantException(id); return Rant.fromJson(this, json.get("rant").getAsJsonObject(), json.get("comments").getAsJsonArray()); } /** * Get a collab by its id. * * @param id The id of the collab to get. * @return The collab. */ public Collab getCollab(int id) { // Collabs url, collab id, app id. String url = String.format("%1$s/%2$d?app=%3$s", DevRant.API_RANTS, id, DevRant.APP_ID); JsonObject json = get(url); // Check if the collab exists. 
if (!Util.jsonSuccess(json)) throw new NoSuchRantException(id); return Collab.fromJson(this, json.get("rant").getAsJsonObject(), json.get("comments").getAsJsonArray()); } /** * Get a user by their username. * * @param username The username of the user to get. * @return The user. */ public User getUser(String username) { // Users url, user id, app id. String url = String.format("%1$s?app=%2$s&username=%3$s", DevRant.API_USER_ID, DevRant.APP_ID, username); JsonObject json = get(url); // Check if the user exists. if (!Util.jsonSuccess(json)) throw new NoSuchUserException(username); return getUser(json.get("user_id").getAsInt()); } /** * Get a user by their id. * * @param id The id of the user to get. * @return The user. */ public User getUser(int id) { return new User(this, id); } /** * Vote on a rant. * * @param rant The rant to vote on. * @param vote The vote. * @return Whether the vote was successful. */ public boolean vote(Rant rant, Vote vote) { return voteRant(rant.getId(), vote); } /** * Vote on a rant. * * @param id The id of the rant. * @param vote The vote. * @return Whether the vote was successful. */ public boolean voteRant(int id, Vote vote) { // Rants url, id, vote url. String url = String.format("%1$s/%2$d%3$s", API_RANTS, id, API_VOTE); return Util.jsonSuccess(post(url, new BasicNameValuePair("vote", String.valueOf(vote.getValue())))); } /** * Vote on a comment. * * @param comment The comment to vote on. * @param vote The vote. * @return Whether the vote was successful. */ public boolean vote(Comment comment, Vote vote) { return voteComment(comment.getId(), vote); } /** * Vote on a comment. * * @param id The id of the comment. * @param vote The vote. * @return Whether the vote was successful. */ public boolean voteComment(int id, Vote vote) { // API url, comments url, id, vote url. 
String url = String.format("%1$s%2$s/%3$d%4$s", API, API_COMMENT, id, API_VOTE); return Util.jsonSuccess(post(url, new BasicNameValuePair("vote", String.valueOf(vote.getValue())))); } /** * Post a rant. * * @param rant The content of the rant. * @param tags The tags. * @return Whether posting the rant was successful. */ public boolean postRant(String rant, String tags) { return Util.jsonSuccess(post(API_RANTS, new BasicNameValuePair("rant", rant), new BasicNameValuePair("tags", tags) )); } /** * Post a comment. * * @param rant The rant to post the comment on. * @param comment The content of the comment. * @return Whether posting the comment was successful. */ public boolean postComment(Rant rant, String comment) { return postComment(rant.getId(), comment); } /** * Post a comment. * * @param rantId The id of the rant to post the comment on. * @param comment The content of the comment. * @return Whether posting the comment was successful. */ public boolean postComment(int rantId, String comment) { // Rants url, rant, comments url. String url = String.format("%1$s/%2$d%3$s", API_RANTS, rantId, API_COMMENT); return Util.jsonSuccess(post(url, new BasicNameValuePair("comment", comment))); } /** * Make a POST-request to the devRant server. * * @param url The url to make the request to. * @param params The parameters to post. * @return A {@link JsonObject} containing the response. */ JsonObject post(String url, NameValuePair... params) { List<NameValuePair> paramList = new ArrayList<>(params.length + 5); paramList.addAll(Arrays.asList(params)); // Add the parameters which always need to be present. paramList.add(new BasicNameValuePair("app", APP_ID)); paramList.add(new BasicNameValuePair("plat", PLAT_ID)); // Add the auth information. 
if (isLoggedIn()) { paramList.add(new BasicNameValuePair("token_id", auth.getId())); paramList.add(new BasicNameValuePair("token_key", auth.getKey())); paramList.add(new BasicNameValuePair("user_id", auth.getUserId())); } return executeRequest(Request.Post(BASE_URL + url).bodyForm(paramList)); } /** * Make a GET-request to the devRant server. * * @param url The url to make the request to. * @return A {@link JsonObject} containing the response. */ JsonObject get(String url) { return executeRequest(Request.Get(BASE_URL + url)); } /** * Execute a request and parse the response. * * @param request The request to execute. * @return A {@link JsonObject} containing the response. */ private static JsonObject executeRequest(Request request) { // Make the request and get the returned content as a stream. InputStream stream; try { stream = request.execute().returnContent().asStream(); } catch (IOException e) { e.printStackTrace(); return null; } // Parse the response as json. try (JsonReader reader = new JsonReader(new InputStreamReader(stream))) { return new JsonParser().parse(reader).getAsJsonObject(); } catch (IOException e) { e.printStackTrace(); return null; } } }
package cc.twittertools.post.embed; import cc.twittertools.post.Pair; import cc.twittertools.post.Sigil; import cc.twittertools.post.old.Tweet; import cc.twittertools.post.tabwriter.TabWriter; import com.google.common.collect.Sets; import com.sun.org.apache.regexp.internal.RE; import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.mutable.MutableInt; import java.net.URI; import java.util.Optional; import java.util.Set; /** * A tweet fragment embedded in an original Tweet where it's referenced as a retweet. */ public class Retweet { private final Set<String> hashTags; private final String author; private final String msg; private final Set<String> addressees; private final long id; private final Optional<WebExcerpt> embeddedPage; private final Optional<Retweet> embeddedRetweet; private final boolean isManualRetweet; public Retweet (long id, String author, String msg, Optional<WebExcerpt> embeddedPage, Optional<Retweet> embeddedRetweet) { this( /* hashTags = */ Sets.newHashSet(Sigil.HASH_TAG.extractSigils(msg).getRight()), /* author = */ author, /* msg = */ msg, /* addressees = */ Sets.newHashSet(Sigil.ADDRESSEE.extractSigils(msg).getRight()), id, /* embeddedPage = */ embeddedPage, /* embeddedRetweet = */ embeddedRetweet ); } public Retweet(Set<String> hashTags, String author, String msg, Set<String> addressees, long id, Optional<WebExcerpt> embeddedPage, Optional<Retweet> embeddedRetweet) { super(); assert hashTags != null : "Hash tags set can be empty but not null"; assert ! 
StringUtils.isBlank(author) : "Username can be neither blank nor null"; assert msg != null : "Message cannot be null"; assert addressees != null : "Addressees cannot be null"; assert id > 0 : "ID must be strictly positive"; if (embeddedRetweet.isPresent()) { Retweet e = embeddedRetweet.get(); msg = removeFirstCaseInsensitive(msg, "https://twitter.com/" + e.getAuthor() + "/status/" + e.getId()); } if (embeddedPage.isPresent()) { WebExcerpt e = embeddedPage.get(); msg = removeFirstCaseInsensitive(msg, e.getUri().toASCIIString()); } this.hashTags = hashTags; this.author = author; this.msg = msg; this.addressees = addressees; this.id = id; this.embeddedPage = embeddedPage; this.embeddedRetweet = embeddedRetweet; this.isManualRetweet = ! Sigil.RETWEET.extractSigils(msg).getRight().isEmpty(); } public static String removeFirstCaseInsensitive (String haystack, String needle) { int pos = org.apache.commons.lang3.StringUtils.indexOfIgnoreCase(haystack, needle); return pos > 0 ? haystack.substring(0, pos) + haystack.substring(Math.min(pos + needle.length(), haystack.length())) : haystack; } public Set<String> getHashTags() { return hashTags; } public String getAuthor() { return author; } public String getMsg() { return msg; } public Set<String> getAddressees() { return addressees; } public long getId() { return id; } public Optional<WebExcerpt> getEmbeddedPage() { return embeddedPage; } public Optional<Retweet> getEmbeddedRetweet() { return embeddedRetweet; } /** * Returns the text of this message, all retweets, and any text ex * @return */ public String getAllText(boolean includeWebExcerpts) { return getMsg() + ' ' + getEmbeddedPage().flatMap(WebExcerpt::getExcerpt) .map(e -> e.getTitle() + " " + e.getBody()) .orElse("") + ' ' + getEmbeddedRetweet().map(r -> r.getAllText(includeWebExcerpts)) .orElse(""); } /** * Does this tweet/retweet itself contain a retweeted tweet * <p> * Modern twitter allows nested retweets * @return */ public boolean containsRetweet() { return 
embeddedRetweet.isPresent(); } public boolean isManualRetweet() { return isManualRetweet; } @Override public String toString() { return "@" + author + " : \t " + msg + embeddedPage.map(p -> " [" + p.toString() + "]").orElse("") + embeddedRetweet.map(r -> " Retweeting " + r.toString()).orElse(""); } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; Retweet retweet = (Retweet) o; if (id != retweet.id) return false; if (!author.equals(retweet.author)) return false; if (!msg.equals(retweet.msg)) return false; return embeddedPage.equals(retweet.embeddedPage); } @Override public int hashCode() { int result = author.hashCode(); result = 31 * result + msg.hashCode(); result = 31 * result + (int) (id ^ (id >>> 32)); result = 31 * result + embeddedPage.hashCode(); return result; } public String getMsgLessSigils() { String msg = this.msg; for (String addressee : addressees) msg = Sigil.ADDRESSEE.stripFromMsg(msg, addressee); for (String hashTag : hashTags) msg = Sigil.HASH_TAG.stripFromMsg(msg, hashTag); return msg; } public static final TabWriter<Retweet> WRITER = new TabWriter<Retweet>() { @Override public String asTabDelimStr(Retweet val) { return val.getAuthor() + '\t' + val.getId() + '\t' + val.getMsg() + '\t' + WebExcerpt.WRITER.asTabDelimStr(val.getEmbeddedPage()) + '\t' + asTabDelimStr(val.getEmbeddedRetweet()); } @Override public Pair<Retweet, Integer> fromTabDelimParts(String[] parts, int from) { String author = parts[from + 0]; long id = Long.parseLong(parts[from + 1]); String msg = parts[from + 2]; Pair<Optional<WebExcerpt>, Integer> exPair = WebExcerpt.WRITER.optFromTabDelimParts(parts, from + 3); Pair<Optional<Retweet>, Integer> rtPair = this.optFromTabDelimParts(parts, exPair.getRight()); return Pair.of (new Retweet(id, author, msg, exPair.getLeft(), rtPair.getLeft()), rtPair.getRight()); } }; /** * Returns a shallow copy of this retweet with the given tweet embedded within it * as 
another retweet. * * If the given optional is empty, just return this as is. */ public Retweet withEmbeddedRetweet(URI embeddedRetweetUri, Retweet embeddedRetweet) { if (this.containsRetweet()) { throw new IllegalStateException("This tweet already contains a retweet"); } return new Retweet ( id, author, StringUtils.replaceOnce(msg, embeddedRetweetUri.toASCIIString(), ""), embeddedPage, Optional.of(embeddedRetweet) ); } }
package cn.ranta.demo.luceneengine; import java.io.IOException; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.standard.StandardAnalyzer; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.document.FieldType; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.Term; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TopDocs; import org.apache.lucene.store.Directory; import org.apache.lucene.store.RAMDirectory; /** * Hello world! 
* */ public class Program { public static void main(String[] args) { Directory directory = new RAMDirectory(); Analyzer analyzer = new StandardAnalyzer(); CreateIndex(directory, analyzer); Search(directory, analyzer); System.out.println("the end."); } private static void CreateIndex(Directory directory, Analyzer analyzer) { try { IndexWriterConfig indexWriterConfig = new IndexWriterConfig(analyzer); IndexWriter indexWriter = new IndexWriter(directory, indexWriterConfig); for (int i = 0; i < 100; i++) { Document document = new Document(); FieldType storeOnlyFieldType = new FieldType(); storeOnlyFieldType.setStored(true); document.add(new Field("StoreOnly", String.format("Store Only %d %d", i / 10, i % 10), storeOnlyFieldType)); FieldType indexOnlyFieldType = new FieldType(); indexOnlyFieldType.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS); document.add(new Field("IndexOnly", String.format("Index Only %d %d", i / 10, i % 10), indexOnlyFieldType)); FieldType storeIndexFieldType = new FieldType(); storeIndexFieldType.setStored(true); //storeIndexFieldType.setTokenized(false);// storeIndexFieldType.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS); document.add(new Field("StoreIndex", String.format("Store Index %d %d", i / 10, i % 10), storeIndexFieldType)); indexWriter.addDocument(document); } // indexWriter.commit(); indexWriter.close(); } catch (IOException e) { e.printStackTrace(); } } private static void Search(Directory directory, Analyzer analyzer) { try { IndexReader indexReader = DirectoryReader.open(directory); IndexSearcher indexSearcher = new IndexSearcher(indexReader); TopDocs topDocs = null; int demo = 6; switch (demo) { case 0: { //TermQuery //setTokenized(false) to see this demo. 
TermQuery termQuery = new TermQuery(new Term("StoreIndex", "Store Index 5 6")); topDocs = indexSearcher.search(termQuery, 5); } break; case 1: { //TermQuery TermQuery termQuery = new TermQuery(new Term("StoreOnly", "7")); topDocs = indexSearcher.search(termQuery, 5); } break; case 2: { //TermQuery TermQuery termQuery = new TermQuery(new Term("IndexOnly", "7")); topDocs = indexSearcher.search(termQuery, 5); } break; case 3: { //TermQuery TermQuery termQuery = new TermQuery(new Term("StoreIndex", "7")); topDocs = indexSearcher.search(termQuery, 5); } break; case 4: { //BooleanQuery MUST TermQuery termQuery6 = new TermQuery(new Term("IndexOnly", "6")); TermQuery termQuery7 = new TermQuery(new Term("IndexOnly", "7")); BooleanQuery booleanQuery = new BooleanQuery.Builder() .add(new BooleanClause(termQuery6, Occur.MUST)) .add(new BooleanClause(termQuery7, Occur.MUST)) .build(); topDocs = indexSearcher.search(booleanQuery, 20); } break; case 5: { //BooleanQuery SHOULD TermQuery termQuery6 = new TermQuery(new Term("IndexOnly", "6")); TermQuery termQuery7 = new TermQuery(new Term("IndexOnly", "7")); BooleanQuery booleanQuery = new BooleanQuery.Builder() .add(new BooleanClause(termQuery6, Occur.SHOULD)) .add(new BooleanClause(termQuery7, Occur.SHOULD)) .build(); topDocs = indexSearcher.search(booleanQuery, 20); } break; case 6: { //BooleanQuery MUST & SHOULD TermQuery termQuery5 = new TermQuery(new Term("IndexOnly", "5")); TermQuery termQuery6 = new TermQuery(new Term("IndexOnly", "6")); TermQuery termQuery7 = new TermQuery(new Term("IndexOnly", "7")); TermQuery termQuery8 = new TermQuery(new Term("IndexOnly", "8")); BooleanQuery must56 = new BooleanQuery.Builder() .add(new BooleanClause(termQuery5, Occur.MUST)) .add(new BooleanClause(termQuery6, Occur.MUST)) .build(); BooleanQuery must78 = new BooleanQuery.Builder() .add(new BooleanClause(termQuery7, Occur.MUST)) .add(new BooleanClause(termQuery8, Occur.MUST)) .build(); BooleanQuery booleanQuery = new BooleanQuery.Builder() 
.add(new BooleanClause(must56, Occur.SHOULD)) .add(new BooleanClause(must78, Occur.SHOULD)) .build(); topDocs = indexSearcher.search(booleanQuery, 20); } break; default: break; } if (topDocs != null && topDocs.scoreDocs.length > 0) { for (ScoreDoc scoreDoc : topDocs.scoreDocs) { Document document = indexReader.document(scoreDoc.doc); // IndexableField storeOnlyField = document.getField("StoreOnly"); // if (storeOnlyField != null) { // System.out.println(storeOnlyField.stringValue()); // } else { // System.out.println("null"); // IndexableField indexOnlyField = document.getField("IndexOnly"); // if (indexOnlyField != null) { // System.out.println(indexOnlyField.stringValue()); // } else { // System.out.println("null"); IndexableField storeIndexField = document.getField("StoreIndex"); if (storeIndexField != null) { System.out.println(storeIndexField.stringValue()); } else { System.out.println("null"); } System.out.println(""); } } indexReader.close(); } catch (Exception e) { e.printStackTrace(); } } }
package com.sun.akuma;

import com.sun.jna.StringArray;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.Arrays;
import static java.util.logging.Level.FINEST;
import java.util.logging.Logger;
import java.io.IOException;
import java.io.FileInputStream;
import java.io.File;
import java.io.ByteArrayOutputStream;
import java.io.RandomAccessFile;
import java.io.DataInputStream;
import static com.sun.akuma.CLibrary.LIBC;

/**
 * List of arguments for Java VM and application.
 *
 * <p>Index 0 is the executable name; JVM system properties appear as
 * {@code -Dname=value} entries after it.
 *
 * @author Kohsuke Kawaguchi
 */
public class JavaVMArguments extends ArrayList<String> {
    public JavaVMArguments() {
    }

    public JavaVMArguments(Collection<? extends String> c) {
        super(c);
    }

    /**
     * Removes the system property of the given name, whether it appears
     * as {@code -Dname} or {@code -Dname=value}.
     *
     * @param name property name without the {@code -D} prefix.
     */
    public void removeSystemProperty(String name) {
        name = "-D" + name;
        String nameeq = name + '=';
        for (Iterator<String> itr = this.iterator(); itr.hasNext();) {
            String s = itr.next();
            if (s.equals(name) || s.startsWith(nameeq))
                itr.remove();
        }
    }

    /**
     * Sets the system property, replacing any existing definition of the
     * same name.
     */
    public void setSystemProperty(String name, String value) {
        removeSystemProperty(name);
        // index 0 is the executable name, so insert right after it
        add(1, "-D" + name + "=" + value);
    }

    /**
     * Removes the n items from the end.
     * Useful for removing all the Java arguments to rebuild them.
     */
    public void removeTail(int n) {
        // Clearing the sub-list view truncates this list in place.
        // FIX: the previous implementation called removeAll(subList(...)),
        // which is wrong when arguments are duplicated — Collection.removeAll
        // removes *every* occurrence of each tail element anywhere in the
        // list, not just the last n entries.
        subList(size() - n, size()).clear();
    }

    /*package*/ StringArray toStringArray() {
        return new StringArray(toArray(new String[size()]));
    }

    /**
     * Gets the process argument list of the current process.
     *
     * @throws UnsupportedOperationException on operating systems other than
     *         Linux and Solaris, for which no /proc parsing is implemented.
     */
    public static JavaVMArguments current() throws IOException {
        String os = System.getProperty("os.name");
        if ("Linux".equals(os))
            return currentLinux();
        if ("SunOS".equals(os))
            return currentSolaris();
        throw new UnsupportedOperationException("Unsupported Operating System " + os);
    }

    /**
     * Linux: /proc/PID/cmdline is the NUL-separated argument vector.
     */
    private static JavaVMArguments currentLinux() throws IOException {
        int pid = LIBC.getpid();
        String cmdline = readFile(new File("/proc/" + pid + "/cmdline"));
        JavaVMArguments args = new JavaVMArguments(Arrays.asList(cmdline.split("\0")));

        // we don't want them inherited by a respawned process
        args.removeSystemProperty(Daemon.class.getName());
        args.removeSystemProperty(NetworkServer.class.getName() + ".mode");
        return args;
    }

    /**
     * Solaris: parse the binary psinfo_t structure in /proc/PID/psinfo to
     * find pr_argc/pr_argv, then chase the argv pointers through
     * /proc/PID/as (the process address space).
     *
     * <p>Relevant psinfo_t layout (32-bit offsets):
     * <pre>
     *   0x08  pid_t     pr_pid    — used as a sanity check below
     *   0xBC  int       pr_argc   — initial argument count
     *   0xC0  uintptr_t pr_argv   — address of initial argument vector
     * </pre>
     */
    private static JavaVMArguments currentSolaris() throws IOException {
        int pid = LIBC.getpid();
        RandomAccessFile psinfo = new RandomAccessFile(new File("/proc/" + pid + "/psinfo"), "r");
        try {
            psinfo.seek(8);
            if (adjust(psinfo.readInt()) != pid)
                throw new IOException("psinfo PID mismatch"); // sanity check

            psinfo.seek(0xBC); // now jump to pr_argc
            int argc = adjust(psinfo.readInt());
            int argp = adjust(psinfo.readInt());

            RandomAccessFile as = new RandomAccessFile(new File("/proc/" + pid + "/as"), "r");
            try {
                JavaVMArguments args = new JavaVMArguments();
                for (int n = 0; n < argc; n++) {
                    // read a pointer to one entry (32-bit pointers assumed)
                    as.seek(to64(argp + n * 4));
                    int p = adjust(as.readInt());
                    args.add(readLine(as, p, "argv[" + n + "]"));
                }
                return args;
            } finally {
                as.close();
            }
        } finally {
            psinfo.close();
        }
    }

    /**
     * {@link DataInputStream} reads a value in big-endian, so
     * convert it to the correct value on little-endian systems.
     */
    private static int adjust(int i) {
        // Equivalent to the manual shift/mask byte swap previously used here.
        return IS_LITTLE_ENDIAN ? Integer.reverseBytes(i) : i;
    }

    /**
     * int to long conversion with zero-padding, so 32-bit addresses above
     * 2GB are not sign-extended before being used as file offsets.
     */
    private static long to64(int i) {
        return i & 0xFFFFFFFFL;
    }

    /**
     * Reads a NUL-terminated C string starting at address {@code p} of the
     * address-space file. {@code prefix} is only used for trace logging.
     */
    private static String readLine(RandomAccessFile as, int p, String prefix) throws IOException {
        if (LOGGER.isLoggable(FINEST))
            LOGGER.finest("Reading " + prefix + " at " + p);

        as.seek(to64(p));
        ByteArrayOutputStream buf = new ByteArrayOutputStream();
        int ch, i = 0;
        // a C string ends at the NUL byte (read() <= 0 also covers EOF)
        while ((ch = as.read()) > 0) {
            if ((++i) % 100 == 0 && LOGGER.isLoggable(FINEST))
                LOGGER.finest(prefix + " is so far " + buf.toString());
            buf.write(ch);
        }
        String line = buf.toString();
        if (LOGGER.isLoggable(FINEST))
            LOGGER.finest(prefix + " was " + line);
        return line;
    }

    /**
     * Reads the entire file.
     *
     * <p>NOTE(review): decodes with the platform-default charset (via
     * {@code ByteArrayOutputStream.toString()}), which matches the historical
     * behavior; /proc/*&#47;cmdline content is effectively raw bytes.
     */
    private static String readFile(File f) throws IOException {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        FileInputStream fin = new FileInputStream(f);
        try {
            int sz;
            byte[] buf = new byte[1024];
            while ((sz = fin.read(buf)) >= 0) {
                baos.write(buf, 0, sz);
            }
            return baos.toString();
        } finally {
            fin.close();
        }
    }

    private static final boolean IS_LITTLE_ENDIAN = "little".equals(System.getProperty("sun.cpu.endian"));
    private static final Logger LOGGER = Logger.getLogger(JavaVMArguments.class.getName());
}
package com.twilio.wiztowar;

import com.google.common.collect.ImmutableMap;
import com.yammer.dropwizard.Service;
import com.yammer.dropwizard.config.*;
import com.yammer.dropwizard.jersey.JacksonMessageBodyProvider;
import com.yammer.dropwizard.json.ObjectMapperFactory;
import com.yammer.dropwizard.servlets.ThreadNameFilter;
import com.yammer.dropwizard.tasks.TaskServlet;
import com.yammer.dropwizard.util.Generics;
import com.yammer.dropwizard.validation.Validator;
import com.yammer.metrics.HealthChecks;
import com.yammer.metrics.core.HealthCheck;
import com.yammer.metrics.reporting.AdminServlet;
import com.yammer.metrics.util.DeadlockHealthCheck;
import org.eclipse.jetty.servlet.FilterHolder;
import org.eclipse.jetty.servlet.ServletHolder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.servlet.ServletContext;
import javax.servlet.ServletRegistration;
import javax.ws.rs.core.Application;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.EventListener;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

/**
 * The {@link DWAdapter} adapts a Dropwizard {@link Service} to be hooked in to the lifecycle of a WAR.
 */
public abstract class DWAdapter<T extends Configuration> extends Application {

    /**
     * The {@link Logger} to use.
     */
    final static Logger logger = LoggerFactory.getLogger(DWAdapter.class);

    /**
     * The Jersey singletons.
     */
    private HashSet<Object> singletons;

    /**
     * The Jersey classes.
     */
    private HashSet<Class<?>> classes;

    /**
     * The {@link Service} which we are adapting.
     */
    private Service dwService;

    /**
     * The environment built for the adapted service; kept so {@link #shutDown()} can stop it.
     */
    private ExtendedEnvironment environment;

    /**
     * Implementation of the Jersey Application. Returns the classes.
     *
     * @return the Jersey configured classes
     */
    @Override
    public Set<Class<?>> getClasses() {
        // dwService doubles as the "already initialized" flag; initialization
        // is guarded so concurrent first calls don't run it twice.
        synchronized (this) {
            if (dwService == null) {
                initialize();
            }
        }
        return classes;
    }

    /**
     * Implementation of the Jersey Application. Returns the singletons.
     *
     * @return the Jersey configured singletons.
     */
    @Override
    public Set<Object> getSingletons() {
        synchronized (this) {
            if (dwService == null) {
                initialize();
            }
        }
        return singletons;
    }

    /**
     * Initialize the {@link Service} and configure the Servlet and Jersey environment.
     *
     * @throws IllegalStateException wrapping any failure during bootstrap, configuration
     *         parsing, or environment start-up.
     */
    private void initialize() {
        try {
            dwService = getSingletonService();
            if (dwService == null) {
                throw new IllegalStateException("The singleton service is null");
            }
            final Bootstrap<T> bootstrap = new Bootstrap<T>(dwService);
            dwService.initialize(bootstrap);
            final T configuration = parseConfiguration(getConfigurationFile(),
                    getConfigurationClass(),
                    bootstrap.getObjectMapperFactory().copy());
            if (configuration != null) {
                // NOTE(review): the message says logging is deferred to the app
                // server, yet LoggingFactory.configure() is still invoked —
                // confirm which behavior is intended.
                logger.info("The WizToWar adapter defers logging configuration to the application server");
                new LoggingFactory(configuration.getLoggingConfiguration(), bootstrap.getName()).configure();
            }
            final Validator validator = new Validator();
            environment = new ExtendedEnvironment(bootstrap.getName(), configuration,
                    bootstrap.getObjectMapperFactory(), validator);
            try {
                environment.start();
                bootstrap.runWithBundles(configuration, environment);
                dwService.run(configuration, environment);
                addHealthChecks(environment);

                final ServletContext servletContext = ServletContextCallback.getServletContext();
                if (servletContext == null) {
                    throw new IllegalStateException("ServletContext is null");
                }
                // Set the static DWAdapter so that we can shutdown
                ServletContextCallback.setDWAdapter(this);
                createInternalServlet(environment, servletContext);
                createExternalServlet(environment, configuration.getHttpConfiguration(), servletContext);
                environment.validateJerseyResources();
                environment.logEndpoints(configuration);

                // Now collect the Jersey configuration
                singletons = new HashSet<Object>();
                singletons.addAll(environment.getJerseyResourceConfig().getSingletons());
                classes = new HashSet<Class<?>>();
                classes.addAll(environment.getJerseyResourceConfig().getClasses());
            } catch (Exception e) {
                // FIX: the previous call logger.error("Error {} ", e) never filled
                // the {} placeholder because the sole argument is consumed as the
                // Throwable; log a plain message with the stack trace instead.
                logger.error("Error configuring the Dropwizard environment", e);
                throw new IllegalStateException(e);
            }
        } catch (Exception e) {
            logger.error("Error initializing the Dropwizard service", e);
            throw new IllegalStateException(e);
        }
    }

    /**
     * Parse the configuration from the {@link File}.
     *
     * @param file                the {@link File} containing the configuration.
     * @param configurationClass  the configuration class
     * @param objectMapperFactory the {@link ObjectMapperFactory} to use
     * @return the configuration instance
     * @throws IOException            if the file cannot be read
     * @throws ConfigurationException if the file contents are invalid
     * @throws FileNotFoundException  if a non-null file does not exist
     */
    private T parseConfiguration(final File file,
                                 final Class<T> configurationClass,
                                 final ObjectMapperFactory objectMapperFactory)
            throws IOException, ConfigurationException {
        final ConfigurationFactory<T> configurationFactory =
                ConfigurationFactory.forClass(configurationClass, new Validator(), objectMapperFactory);
        if (file != null) {
            if (!file.exists()) {
                throw new FileNotFoundException("File " + file + " not found");
            }
            return configurationFactory.build(file);
        }
        // No file: build a configuration instance from defaults only.
        return configurationFactory.build();
    }

    /**
     * Retrieve the configuration class.
     *
     * @return the configuration class.
     */
    protected Class<T> getConfigurationClass() {
        return Generics.getTypeParameter(getClass(), Configuration.class);
    }

    /**
     * This method is adapted from ServerFactory.createInternalServlet.
     *
     * <p>FIX: the original text was missing the closing braces for the
     * {@code if} block and the method itself, leaving the class syntactically
     * invalid; they are restored here with no additional behavior.
     * NOTE(review): upstream also registers an {@link AdminServlet} here —
     * verify against the original wiztowar source whether that registration
     * was dropped intentionally.
     */
    private void createInternalServlet(final ExtendedEnvironment env, final ServletContext context) {
        if (context.getMajorVersion() >= 3) {
            // Add the Task servlet (requires Servlet 3.0 for dynamic registration)
            final ServletRegistration.Dynamic taskServlet =
                    context.addServlet("TaskServlet", new TaskServlet(env.getTasks()));
            taskServlet.setAsyncSupported(true);
        }
    }

    /**
     * This method is adapted from ServerFactory.createExternalServlet.
     *
     * @param env     the {@link ExtendedEnvironment} from which we find the resources to act on.
     * @param config  the HTTP configuration supplying context init parameters
     * @param context the {@link ServletContext} to add to
     */
    private void createExternalServlet(ExtendedEnvironment env, HttpConfiguration config, ServletContext context) {
        context.addFilter("ThreadNameFilter", ThreadNameFilter.class);

        if (!env.getProtectedTargets().isEmpty()) {
            logger.warn("The WizToWar adapter doesn't support protected targets");
        }
        for (ImmutableMap.Entry<String, ServletHolder> entry : env.getServlets().entrySet()) {
            context.addServlet(entry.getKey(), entry.getValue().getServletInstance());
        }
        env.addProvider(new JacksonMessageBodyProvider(env.getObjectMapperFactory().build(), env.getValidator()));
        for (ImmutableMap.Entry<String, FilterHolder> entry : env.getFilters().entries()) {
            context.addFilter(entry.getKey(), entry.getValue().getFilter());
        }
        for (EventListener listener : env.getServletListeners()) {
            context.addListener(listener);
        }
        for (Map.Entry<String, String> entry : config.getContextParameters().entrySet()) {
            context.setInitParameter(entry.getKey(), entry.getValue());
        }
        if (env.getSessionHandler() != null) {
            logger.warn("The WizToWar adapter doesn't support custom session handlers.");
        }
    }

    /**
     * This method is adapted from ServerFactory.buildServer.
     *
     * @param env the {@link ExtendedEnvironment} to get {@link HealthCheck}s from.
     */
    private void addHealthChecks(ExtendedEnvironment env) {
        HealthChecks.defaultRegistry().register(new DeadlockHealthCheck());
        for (HealthCheck healthCheck : env.getHealthChecks()) {
            HealthChecks.defaultRegistry().register(healthCheck);
        }
        if (env.getHealthChecks().isEmpty()) {
            logger.warn('\n' +
                    "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n" +
                    "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n" +
                    "! THIS SERVICE HAS NO HEALTHCHECKS. THIS MEANS YOU WILL NEVER KNOW IF IT !\n" +
                    "! DIES IN PRODUCTION, WHICH MEANS YOU WILL NEVER KNOW IF YOU'RE LETTING !\n" +
                    "! YOUR USERS DOWN. YOU SHOULD ADD A HEALTHCHECK FOR EACH DEPENDENCY OF !\n" +
                    "! YOUR SERVICE WHICH FULLY (BUT LIGHTLY) TESTS YOUR SERVICE'S ABILITY TO !\n" +
                    "! USE THAT SERVICE. THINK OF IT AS A CONTINUOUS INTEGRATION TEST. !\n" +
                    "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n" +
                    "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
            );
        }
    }

    /**
     * Override to provide your particular Dropwizard Service.
     *
     * @return your {@link Service}
     */
    public abstract Service getSingletonService();

    /**
     * Override to provide your configuration {@link File} location.
     *
     * @return the {@link File} to read the configuration from.
     */
    public abstract File getConfigurationFile();

    /**
     * Stops the environment; invoked by the servlet container on undeploy.
     */
    public void shutDown() {
        try {
            this.environment.stop();
        } catch (Exception e) {
            // FIX: same misuse of the {} placeholder as in initialize().
            logger.error("Failed to stop environment cleanly", e);
        }
    }
}
package CustomOreGen.Server;

import java.awt.Frame;
import java.io.DataInputStream;
import java.io.File;
import java.io.IOException;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.Map;
import java.util.Random;
import net.minecraft.block.BlockSand;
import net.minecraft.client.gui.GuiCreateWorld;
import net.minecraft.client.gui.GuiScreen;
import net.minecraft.entity.player.EntityPlayerMP;
import net.minecraft.nbt.CompressedStreamTools;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.server.MinecraftServer;
import net.minecraft.world.ChunkCoordIntPair;
import net.minecraft.world.World;
import net.minecraft.world.WorldServer;
import net.minecraft.world.biome.BiomeGenBase;
import net.minecraft.world.chunk.storage.RegionFileCache;
import net.minecraft.world.storage.ISaveFormat;
import net.minecraft.world.storage.SaveFormatOld;
import net.minecraft.world.storage.WorldInfo;
import CustomOreGen.CustomOreGenBase;
import CustomOreGen.CustomPacketPayload;
import CustomOreGen.CustomPacketPayload.PayloadType;
import CustomOreGen.GeometryData;
import CustomOreGen.GeometryRequestData;
import CustomOreGen.MystcraftSymbolData;
import CustomOreGen.Server.GuiCustomOreGenSettings.GuiOpenMenuButton;
import CustomOreGen.Util.GeometryStream;
import CustomOreGen.Util.SimpleProfiler;
import cpw.mods.fml.relauncher.ReflectionHelper;
import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;

/**
 * Static server-side state for the CustomOreGen mod: per-world configuration
 * cache, deferred chunk-population bookkeeping, and server life-cycle hooks.
 *
 * NOTE(review): this code appears to be decompiled (var16/var18/i$ names);
 * comments below describe observable behavior only.
 */
public class ServerState {
    // Currently attached server instance; null until onServerChanged() runs.
    private static MinecraftServer _server = null;
    // Lazily built configuration per loaded World.
    private static Map<World,WorldConfig> _worldConfigs = new HashMap();
    // dimensionId -> (meta-chunk coordinate -> 16-int bit field); see the
    // meta-chunk encoding comment above onPopulateChunk().
    private static Map<Integer,Map<ChunkCoordIntPair,int[]>> _populatedChunks = new HashMap();
    // Lazily created "Custom Ore Generation..." button for the world-creation GUI
    // (kept as Object so the client-only class is not loaded on the server).
    private static Object _optionsGuiButton = null;

    /**
     * Returns true if the chunk's saved NBT exists and has
     * Level.TerrainPopulated set. I/O errors are treated as "not populated".
     */
    private static boolean isChunkSavedPopulated(World world, int chunkX, int chunkZ) {
        File saveFolder = getWorldConfig(world).dimensionDir;
        DataInputStream stream = RegionFileCache.getChunkInputStream(saveFolder, chunkX, chunkZ);
        if (stream != null) {
            try {
                NBTTagCompound ex = CompressedStreamTools.read(stream);
                if (ex.hasKey("Level") && ex.getCompoundTag("Level").getBoolean("TerrainPopulated")) {
                    return true;
                }
            } catch (IOException var6) {
                ; // best-effort check: unreadable chunk counts as unpopulated
            }
        }
        return false;
    }

    /**
     * Returns the cached WorldConfig for the world, building it on first use.
     * On a config error the user may retry (loop again) or fall back to an
     * empty config via the error dialog.
     */
    public static WorldConfig getWorldConfig(World world) {
        WorldConfig cfg = _worldConfigs.get(world);
        while (cfg == null) {
            try {
                cfg = new WorldConfig(world);
                validateOptions(cfg.getConfigOptions(), true);
                validateDistributions(cfg.getOreDistributions(), true);
            } catch (Exception var4) {
                if (onConfigError(var4)) {
                    cfg = null;
                    continue; // user chose "retry"
                }
                cfg = WorldConfig.createEmptyConfig();
            }
            _worldConfigs.put(world, cfg);
        }
        return cfg;
    }

    /** Drops the cached config so the next getWorldConfig() rebuilds it. */
    public static void clearWorldConfig(World world) {
        _worldConfigs.remove(world);
    }

    /**
     * Logs a config error and, when a GUI frame is available, shows the error
     * dialog. Returns true if the user asked to retry loading.
     */
    public static boolean onConfigError(Throwable error) {
        CustomOreGenBase.log.error("Problem loading world config", error);
        Frame[] frames = Frame.getFrames();
        if (frames != null && frames.length > 0) {
            switch ((new ConfigErrorDialog()).showDialog(frames[0], error)) {
                case 1:
                    return true;  // retry
                case 2:
                    return false; // give up / use fallback
            }
        }
        return false;
    }

    /**
     * Validates each distribution; when cull is true, invalid ones are
     * removed instead of kept.
     */
    public static void validateDistributions(Collection distributions, boolean cull) throws IllegalStateException {
        Iterator it = distributions.iterator();
        while (it.hasNext()) {
            IOreDistribution dist = (IOreDistribution)it.next();
            if (!dist.validate() && cull) {
                it.remove();
            }
        }
    }

    /**
     * When cull is true, strips display-only option groups from the
     * collection (they carry no generation settings).
     */
    public static void validateOptions(Collection options, boolean cull) {
        Iterator it = options.iterator();
        while (it.hasNext()) {
            ConfigOption option = (ConfigOption)it.next();
            if (cull && option instanceof ConfigOption.DisplayGroup) {
                it.remove();
            }
        }
    }

    /**
     * Runs generate/populate/cull for every distribution on the given chunk.
     * Sand physics and scheduled updates are made immediate for the duration,
     * mirroring vanilla population behavior.
     */
    public static void populateDistributions(Collection<IOreDistribution> distributions, World world, int chunkX, int chunkZ) {
        SimpleProfiler.globalProfiler.startSection("Populate");
        BlockSand.fallInstantly = true;
        world.scheduledUpdatesAreImmediate = true;

        for (IOreDistribution dist : distributions) {
            dist.generate(world, chunkX, chunkZ);
            dist.populate(world, chunkX, chunkZ);
            dist.cull();
        }

        world.scheduledUpdatesAreImmediate = false;
        BlockSand.fallInstantly = false;
        SimpleProfiler.globalProfiler.endSection();
    }

    /**
     * Builds wire-format geometry for the in-game wireframe debugger, or null
     * when no server is attached, the request has no world, or debugging mode
     * is disabled for that world.
     */
    public static GeometryData getDebuggingGeometryData(GeometryRequestData request) {
        if (_server == null) {
            return null;
        } else if (request.world == null) {
            return null;
        } else {
            WorldConfig cfg = getWorldConfig(request.world);
            if (!cfg.debuggingMode) {
                return null;
            } else {
                // geomSize is accumulated but never read afterwards —
                // presumably decompiler residue.
                int geomSize = 0;
                LinkedList streams = new LinkedList();
                IOreDistribution dist;
                // dist.cull() runs as the for-update expression after each pass.
                for (Iterator i$ = cfg.getOreDistributions().iterator(); i$.hasNext(); dist.cull()) {
                    dist = (IOreDistribution)i$.next();
                    dist.generate(request.world, request.chunkX, request.chunkZ);
                    GeometryStream stream = dist.getDebuggingGeometry(request.world, request.chunkX, request.chunkZ);
                    if (stream != null) {
                        streams.add(stream);
                        geomSize += stream.getStreamDataSize();
                    }
                }
                return new GeometryData(request, streams);
            }
        }
    }

    /* For tracking which chunks have been populated, the server bins the chunks into 4x4 meta-chunks.
     * Each meta-chunk is represented by a 16 integer array, indexed by the local X coordinate of a
     * given chunk. Each integer contains two bits of information for every chunk, by splitting the
     * 4-byte integer into two 2-byte parts. The local Z coordinate indexes into
     * the 16 bits of each part. The least significant part indicates whether an attempt has been
     * made to populate the chunk, while the most significant indicates whether we have checked
     * if the chunk was marked populated in the save (presumably this is an expensive check and
     * thus is memoized).
     */
    public static void onPopulateChunk(World world, Random rand, int chunkX, int chunkZ) {
        WorldConfig cfg = getWorldConfig(world);
        Map<ChunkCoordIntPair,int[]> dimChunkMap = null;
        int cRange = world.provider.dimensionId;
        dimChunkMap = _populatedChunks.get(cRange);
        if (dimChunkMap == null) {
            dimChunkMap = new HashMap();
            _populatedChunks.put(cRange, dimChunkMap);
        }

        // Meta-chunk key. NOTE: (x >>> 4, x & 15) is an injective mapping even
        // for negative chunk coordinates (unconventional vs >> 4, but correct
        // as long as keys are produced consistently, which they are here).
        ChunkCoordIntPair neighborMax = new ChunkCoordIntPair(chunkX >>> 4, chunkZ >>> 4);
        int[] cX = dimChunkMap.get(neighborMax);
        if (cX == null) {
            cX = new int[16];
            dimChunkMap.put(neighborMax, cX);
        }
        // 65537 == 0x00010001: sets both the "population attempted" bit (low
        // half) and the "save checked" bit (high half) for this chunk.
        cX[chunkX & 15] |= 65537 << (chunkZ & 15);

        // var16 = deferral radius in chunks; var17 = number of chunks in the
        // (2*var16+1)^2 square = 4r(r+1)+1 ... wait, it equals 4r^2+4r+1.
        int var16 = (cfg.deferredPopulationRange + 15) / 16;
        int var17 = 4 * var16 * (var16 + 1) + 1;

        // For every chunk whose deferral neighborhood includes the chunk just
        // populated, populate it once all of its own neighbors are populated.
        for (int var18 = chunkX - var16; var18 <= chunkX + var16; ++var18) {
            for (int cZ = chunkZ - var16; cZ <= chunkZ + var16; ++cZ) {
                int neighborCount = 0;

                for (int iX = var18 - var16; iX <= var18 + var16; ++iX) {
                    for (int iZ = cZ - var16; iZ <= cZ + var16; ++iZ) {
                        ChunkCoordIntPair chunkKey = new ChunkCoordIntPair(iX >>> 4, iZ >>> 4);
                        int[] chunkData = dimChunkMap.get(chunkKey);
                        if (chunkData == null) {
                            chunkData = new int[16];
                            dimChunkMap.put(chunkKey, chunkData);
                        }

                        // High bit (0x10000 << z) unset -> we have not yet
                        // consulted the save; do so now and memoize.
                        if ((chunkData[iX & 15] >>> (iZ & 15) & 65536) == 0) {
                            boolean populated = isChunkSavedPopulated(world, iX, iZ);
                            //if (populated)
                            //FMLLog.info("[%d/%d](%d/%d): populated in save", var18, cZ, iX, iZ);
                            chunkData[iX & 15] |= (populated ? 65537 : 65536) << (iZ & 15);
                        }

                        // Low bit set -> neighbor is populated.
                        if ((chunkData[iX & 15] >>> (iZ & 15) & 1) != 0) {
                            //FMLLog.info("[%d/%d](%d/%d): is neighbor", var18, cZ, iX, iZ);
                            ++neighborCount;
                        }
                    }
                }

                if (neighborCount == var17) {
                    //FMLLog.info("[%d/%d]: populating", var18, cZ);
                    populateDistributions(cfg.getOreDistributions(), world, var18, cZ);
                } else {
                    //FMLLog.info("[%d/%d]: only %d neighbors", var18, cZ, neighborCount);
                }
            }
        }
    }

    /**
     * Detects a server switch. If worldInfo is null, the first non-null world
     * info among the server's worlds is used. Returns true when
     * onServerChanged() was invoked.
     */
    public static boolean checkIfServerChanged(MinecraftServer currentServer, WorldInfo worldInfo) {
        if (_server == currentServer) {
            return false;
        } else {
            if (currentServer != null && worldInfo == null) {
                if (currentServer.worldServers == null) {
                    return false;
                }

                for (WorldServer world : currentServer.worldServers) {
                    if (world != null) {
                        worldInfo = world.getWorldInfo();
                    }
                    if (worldInfo != null) {
                        break;
                    }
                }

                if (worldInfo == null) {
                    return false;
                }
            }

            onServerChanged(currentServer, worldInfo);
            return true;
        }
    }

    /**
     * Resets all per-server state and eagerly loads the base world config
     * from the save directory, looping through the error dialog on failure.
     */
    public static void onServerChanged(MinecraftServer server, WorldInfo worldInfo) {
        _worldConfigs.clear();
        WorldConfig.loadedOptionOverrides[1] = WorldConfig.loadedOptionOverrides[2] = null;
        _populatedChunks.clear();
        _server = server;
        CustomOreGenBase.log.debug("Server world changed to " + worldInfo.getWorldName());

        // NOTE(review): worldBaseDir/saveFormat look like decompiler residue —
        // the biome array and its length are never used below.
        BiomeGenBase[] worldBaseDir = BiomeGenBase.getBiomeGenArray();
        int saveFormat = worldBaseDir.length;
        File var8 = null;
        ISaveFormat var9 = _server.getActiveAnvilConverter();
        if (var9 != null && var9 instanceof SaveFormatOld) {
            var8 = ((SaveFormatOld)var9).savesDirectory;
        }

        // Resolve <saves dir>/<world folder>.
        var8 = new File(var8, _server.getFolderName());
        WorldConfig var10 = null;

        while (var10 == null) {
            try {
                var10 = new WorldConfig(worldInfo, var8);
                validateOptions(var10.getConfigOptions(), false);
                validateDistributions(var10.getOreDistributions(), false);
            } catch (Exception var7) {
                if (!onConfigError(var7)) {
                    break;
                }
                var10 = null; // user chose "retry"
            }
        }
    }

    /**
     * Client-side hook: injects the "Custom Ore Generation..." button into the
     * world-creation screen's control list (fetched via reflection) and hides
     * it while the hardcore/more-options flag is active.
     */
    @SideOnly(Side.CLIENT)
    public static void onWorldCreationMenuTick(GuiCreateWorld gui) {
        if (gui == null) {
            _optionsGuiButton = null;
        } else {
            if (_optionsGuiButton == null) {
                WorldConfig.loadedOptionOverrides[0] = null;
                GuiCustomOreGenSettings button = new GuiCustomOreGenSettings(gui);
                _optionsGuiButton = new GuiOpenMenuButton(gui, 99, 0, 0, 150, 20, "Custom Ore Generation...", button);
            }

            GuiOpenMenuButton button1 = (GuiOpenMenuButton)_optionsGuiButton;
            // Field index 3 = GuiScreen's button list; index 11 = a boolean on
            // GuiCreateWorld controlling visibility — obfuscated-field access.
            Collection controlList = (Collection)ReflectionHelper.getPrivateValue(GuiScreen.class, gui, 3);
            if (!controlList.contains(button1)) {
                button1.xPosition = (gui.width - button1.getWidth()) / 2;
                button1.yPosition = 165;
                controlList.add(button1);
            }

            button1.visible = !((Boolean)ReflectionHelper.getPrivateValue(GuiCreateWorld.class, gui, 11)).booleanValue();
        }
    }

    /**
     * On login, pushes the world's Mystcraft symbol data to the client
     * (only when Mystcraft integration is active).
     */
    public static void onClientLogin(EntityPlayerMP player) {
        if (player.worldObj != null && CustomOreGenBase.hasMystcraft()) {
            Iterator i = getWorldConfig(player.worldObj).getMystcraftSymbols().iterator();
            while (i.hasNext()) {
                MystcraftSymbolData symbolData = (MystcraftSymbolData)i.next();
                (new CustomPacketPayload(PayloadType.MystcraftSymbolData, symbolData)).sendToClient(player);
            }
        }
    }

    /**
     * When a chunk is force-loaded, touch every chunk within the deferred
     * population radius so their generation is triggered.
     */
    public static void chunkForced(World world, ChunkCoordIntPair location) {
        WorldConfig cfg = getWorldConfig(world);
        int radius = (cfg.deferredPopulationRange + 15) / 16;

        for (int cX = location.chunkXPos - radius; cX <= location.chunkXPos + radius; ++cX) {
            for (int cZ = location.chunkZPos - radius; cZ <= location.chunkZPos + radius; ++cZ) {
                world.getChunkFromChunkCoords(cX, cZ);
            }
        }
    }
}
package com.alibaba.fastjson;

import java.io.Closeable;
import java.io.IOException;
import java.io.InputStream;
import java.io.Reader;

/**
 * Streaming JSON validator: a hand-written recursive-descent scanner over a
 * single lookahead character {@code ch}. Subclasses supply {@link #next()}
 * for each input source (UTF-8 bytes, InputStream, String, Reader).
 *
 * Not thread-safe; each instance validates one input once (the result is
 * memoized in {@code valiateResult}).
 */
public abstract class JSONValidator implements Cloneable, Closeable {
    public enum Type {
        Object, Array, Value
    }

    protected boolean eof;          // true once input is exhausted; ch is then '\0'
    protected int pos = -1;         // index of ch in the underlying input/buffer
    protected char ch;              // one-character lookahead
    protected Type type;            // kind of the last top-level value scanned
    // Memoized result of validate(). NOTE(review): field name is a typo for
    // "validateResult", kept as-is since renaming is out of scope for docs.
    private Boolean valiateResult;
    protected int count = 0;        // number of top-level values consumed
    protected boolean supportMultiValue = false; // allow whitespace-separated values

    public static JSONValidator fromUtf8(byte[] jsonBytes) {
        return new UTF8Validator(jsonBytes);
    }

    public static JSONValidator fromUtf8(InputStream is) {
        return new UTF8InputStreamValidator(is);
    }

    public static JSONValidator from(String jsonStr) {
        return new UTF16Validator(jsonStr);
    }

    public static JSONValidator from(Reader r) {
        return new ReaderValidator(r);
    }

    public boolean isSupportMultiValue() {
        return supportMultiValue;
    }

    public JSONValidator setSupportMultiValue(boolean supportMultiValue) {
        this.supportMultiValue = supportMultiValue;
        return this;
    }

    /** Returns the type of the (last) top-level value, validating on demand. */
    public Type getType() {
        if (type == null) {
            validate();
        }
        return type;
    }

    /** Advances to the next character, setting eof/'\0' at end of input. */
    abstract void next();

    /**
     * Validates the whole input. With supportMultiValue enabled, several
     * whitespace-separated top-level values are accepted; otherwise any
     * trailing content fails validation. Result is memoized.
     */
    public boolean validate() {
        if (valiateResult != null) {
            return valiateResult;
        }

        for (;;) {
            if (!any()) {
                valiateResult = false;
                return false;
            }
            count++;
            if (eof) {
                valiateResult = true;
                return true;
            }
            if (supportMultiValue) {
                skipWhiteSpace();
                if (eof) {
                    break;
                }
                continue; // scan the next top-level value
            } else {
                // single-value mode: trailing characters are an error
                valiateResult = false;
                return false;
            }
        }

        valiateResult = true;
        return true;
    }

    public void close() throws IOException {
    }

    /**
     * Scans exactly one JSON value starting at ch. On success, leaves ch just
     * past the value and sets {@code type}. Statement order and the number of
     * next() calls are significant throughout.
     */
    private boolean any() {
        switch (ch) {
            case '{':
                next();
                while (isWhiteSpace(ch)) {
                    next();
                }
                if (ch == '}') {
                    next();
                    type = Type.Object;
                    return true;
                }
                // one "name : value" pair per loop iteration
                for (;;) {
                    if (ch == '"') {
                        fieldName();
                    } else {
                        return false;
                    }
                    skipWhiteSpace();
                    if (ch == ':') {
                        next();
                    } else {
                        return false;
                    }
                    skipWhiteSpace();
                    if (!any()) {
                        return false;
                    }
                    skipWhiteSpace();
                    if (ch == ',') {
                        next();
                        skipWhiteSpace();
                    } else if (ch == '}') {
                        next();
                        type = Type.Object;
                        return true;
                    } else {
                        return false;
                    }
                }
            case '[':
                next();
                skipWhiteSpace();
                if (ch == ']') {
                    next();
                    type = Type.Array;
                    return true;
                }
                for (;;) {
                    if (!any()) {
                        return false;
                    }
                    skipWhiteSpace();
                    if (ch == ',') {
                        next();
                        skipWhiteSpace();
                    } else if (ch == ']') {
                        next();
                        type = Type.Array;
                        return true;
                    } else {
                        return false;
                    }
                }
            case '0':
            case '1':
            case '2':
            case '3':
            case '4':
            case '5':
            case '6':
            case '7':
            case '8':
            case '9':
            case '+':
            case '-':
                // optional sign (note: '+' is accepted here, beyond strict JSON)
                if (ch == '-' || ch == '+') {
                    next();
                    skipWhiteSpace();
                    if (ch < '0' || ch > '9') {
                        return false;
                    }
                }

                // integer part
                do {
                    next();
                } while (ch >= '0' && ch <= '9');

                if (ch == '.') {
                    next();
                    // bug fix: 0.e7 should not pass the test
                    if (ch < '0' || ch > '9') {
                        return false;
                    }
                    while (ch >= '0' && ch <= '9') {
                        next();
                    }
                }

                // exponent part
                if (ch == 'e' || ch == 'E') {
                    next();
                    if (ch == '-' || ch == '+') {
                        next();
                    }
                    if (ch >= '0' && ch <= '9') {
                        next();
                    } else {
                        return false;
                    }
                    while (ch >= '0' && ch <= '9') {
                        next();
                    }
                }

                type = Type.Value;
                break; // falls through to the final "return true"
            case '"':
                next();
                for (;;) {
                    if (eof) {
                        return false; // unterminated string
                    }

                    if (ch == '\\') {
                        next();
                        if (ch == 'u') {
                            // \ uXXXX — consume the four hex digits plus one
                            next();
                            next();
                            next();
                            next();
                            next();
                        } else {
                            next();
                        }
                    } else if (ch == '"') {
                        next();
                        type = Type.Value;
                        return true;
                    } else {
                        next();
                    }
                }
            case 't':
                // literal "true", must be followed by a value terminator
                next();
                if (ch != 'r') {
                    return false;
                }
                next();
                if (ch != 'u') {
                    return false;
                }
                next();
                if (ch != 'e') {
                    return false;
                }
                next();
                if (isWhiteSpace(ch) || ch == ',' || ch == ']' || ch == '}' || ch == '\0') {
                    type = Type.Value;
                    return true;
                }
                return false;
            case 'f':
                // literal "false"
                next();
                if (ch != 'a') {
                    return false;
                }
                next();
                if (ch != 'l') {
                    return false;
                }
                next();
                if (ch != 's') {
                    return false;
                }
                next();
                if (ch != 'e') {
                    return false;
                }
                next();
                if (isWhiteSpace(ch) || ch == ',' || ch == ']' || ch == '}' || ch == '\0') {
                    type = Type.Value;
                    return true;
                }
                return false;
            case 'n':
                // literal "null"
                next();
                if (ch != 'u') {
                    return false;
                }
                next();
                if (ch != 'l') {
                    return false;
                }
                next();
                if (ch != 'l') {
                    return false;
                }
                next();
                if (isWhiteSpace(ch) || ch == ',' || ch == ']' || ch == '}' || ch == '\0') {
                    type = Type.Value;
                    return true;
                }
                return false;
            default:
                return false;
        }
        return true;
    }

    /**
     * Consumes a quoted object key; ch is on the opening quote on entry and
     * just past the closing quote on return. Escapes are skipped, not decoded.
     */
    protected void fieldName() {
        next();
        for (; ; ) {
            if (ch == '\\') {
                next();
                if (ch == 'u') {
                    next();
                    next();
                    next();
                    next();
                    next();
                } else {
                    next();
                }
            } else if (ch == '"') {
                next();
                break;
            } else {
                next();
            }
        }
    }

    /**
     * Consumes a quoted string value; returns false on unterminated input.
     * NOTE(review): appears unused within this class — presumably kept for
     * subclasses or API compatibility.
     */
    protected boolean string() {
        next();
        for (; !eof; ) {
            if (ch == '\\') {
                next();
                if (ch == 'u') {
                    next();
                    next();
                    next();
                    next();
                    next();
                } else {
                    next();
                }
            } else if (ch == '"') {
                next();
                return true;
            } else {
                next();
            }
        }
        return false;
    }

    void skipWhiteSpace() {
        while (isWhiteSpace(ch)) {
            next();
        }
    }

    static final boolean isWhiteSpace(char ch) {
        return ch == ' ' || ch == '\t' || ch == '\r' || ch == '\n' || ch == '\f' || ch == '\b';
    }

    /** Validator over an in-memory UTF-8 byte array (bytes cast to char). */
    static class UTF8Validator extends JSONValidator {
        private final byte[] bytes;

        public UTF8Validator(byte[] bytes) {
            this.bytes = bytes;
            next();
            skipWhiteSpace();
        }

        void next() {
            ++pos;
            if (pos >= bytes.length) {
                ch = '\0';
                eof = true;
            } else {
                ch = (char) bytes[pos];
            }
        }
    }

    /** Validator reading UTF-8 bytes from a stream via a thread-local buffer. */
    static class UTF8InputStreamValidator extends JSONValidator {
        // Buffer recycled per-thread to avoid reallocation across validations.
        private final static ThreadLocal<byte[]> bufLocal = new ThreadLocal<byte[]>();

        private final InputStream is;
        private byte[] buf;
        private int end = -1;      // index of the last valid byte in buf
        private int readCount = 0; // number of read() calls performed

        public UTF8InputStreamValidator(InputStream is) {
            this.is = is;
            buf = bufLocal.get();
            if (buf != null) {
                bufLocal.set(null); // claim the recycled buffer
            } else {
                buf = new byte[1024 * 8];
            }
            next();
            skipWhiteSpace();
        }

        void next() {
            if (pos < end) {
                ch = (char) buf[++pos];
            } else {
                if (!eof) {
                    int len;
                    try {
                        len = is.read(buf, 0, buf.length);
                        readCount++;
                    } catch (IOException ex) {
                        throw new JSONException("read error");
                    }
                    if (len > 0) {
                        ch = (char) buf[0];
                        pos = 0;
                        end = len - 1;
                    } else if (len == -1) {
                        // clean end of stream
                        pos = 0;
                        end = 0;
                        buf = null;
                        ch = '\0';
                        eof = true;
                    } else {
                        // len == 0: treat as an error after marking eof
                        pos = 0;
                        end = 0;
                        buf = null;
                        ch = '\0';
                        eof = true;
                        throw new JSONException("read error");
                    }
                }
            }
        }

        public void close() throws IOException {
            bufLocal.set(buf); // recycle the buffer for this thread
            is.close();
        }
    }

    /** Validator over a String, with a fast path for simple field names. */
    static class UTF16Validator extends JSONValidator {
        private final String str;

        public UTF16Validator(String str) {
            this.str = str;
            next();
            skipWhiteSpace();
        }

        void next() {
            ++pos;
            if (pos >= str.length()) {
                ch = '\0';
                eof = true;
            } else {
                ch = str.charAt(pos);
            }
        }

        protected final void fieldName() {
            // Fast path: scan directly for the closing quote; bail to the
            // generic loop on the first backslash escape.
            for (int i = pos + 1; i < str.length(); ++i) {
                char ch = str.charAt(i);
                if (ch == '\\') {
                    break;
                }
                if (ch == '\"') {
                    this.ch = str.charAt(i + 1);
                    pos = i + 1;
                    return;
                }
            }

            next();
            for (; ; ) {
                if (ch == '\\') {
                    next();
                    if (ch == 'u') {
                        next();
                        next();
                        next();
                        next();
                        next();
                    } else {
                        next();
                    }
                } else if (ch == '"') {
                    next();
                    break;
                } else {
                    next();
                }
            }
        }
    }

    /** Validator reading chars from a Reader via a thread-local buffer. */
    static class ReaderValidator extends JSONValidator {
        private final static ThreadLocal<char[]> bufLocal = new ThreadLocal<char[]>();

        final Reader r;
        private char[] buf;
        private int end = -1;      // index of the last valid char in buf
        private int readCount = 0; // number of read() calls performed

        ReaderValidator(Reader r) {
            this.r = r;
            buf = bufLocal.get();
            if (buf != null) {
                bufLocal.set(null);
            } else {
                buf = new char[1024 * 8];
            }
            next();
            skipWhiteSpace();
        }

        void next() {
            if (pos < end) {
                ch = buf[++pos];
            } else {
                if (!eof) {
                    int len;
                    try {
                        len = r.read(buf, 0, buf.length);
                        readCount++;
                    } catch (IOException ex) {
                        throw new JSONException("read error");
                    }
                    if (len > 0) {
                        ch = buf[0];
                        pos = 0;
                        end = len - 1;
                    } else if (len == -1) {
                        pos = 0;
                        end = 0;
                        buf = null;
                        ch = '\0';
                        eof = true;
                    } else {
                        pos = 0;
                        end = 0;
                        buf = null;
                        ch = '\0';
                        eof = true;
                        throw new JSONException("read error");
                    }
                }
            }
        }

        public void close() throws IOException {
            bufLocal.set(buf);
            r.close();;
        }
    }
}
package controllers;

import com.basho.riak.client.IRiakClient;
import com.basho.riak.client.RiakException;
import com.basho.riak.client.RiakFactory;
import com.basho.riak.client.RiakRetryFailedException;
import com.basho.riak.client.bucket.Bucket;
import com.google.common.base.Strings;
import com.google.common.escape.Escaper;
import com.google.common.html.HtmlEscapers;
import com.google.inject.Inject;
import com.google.inject.Provider;
import com.google.inject.Singleton;
import java.nio.charset.StandardCharsets;
import java.time.Instant;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.YearMonth;
import java.time.ZoneId;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.util.Base64;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
import models.CityList;
import models.Dogpark;
import models.DogparkSignup;
import models.DogparkSignupList;
import ninja.Result;
import ninja.Results;
import ninja.lifecycle.Dispose;
import ninja.lifecycle.Start;
import ninja.params.Param;
import ninja.params.PathParam;
import ninja.postoffice.Mail;
import ninja.postoffice.Postoffice;
import ninja.session.FlashScope;
import ninja.utils.NinjaProperties;
import org.slf4j.Logger;

/**
 * Controller for dogpark meetup pages: listing parks, showing signups for a
 * park, creating signups, and cancelling them via a base64 cancellation code.
 * All persistent state lives in a single Riak bucket keyed by dogpark id
 * (signup lists) plus one "cities" key (the park catalogue).
 */
@Singleton
public class DogparkController {

    public static final String RIAK_HOST = "riak.host";
    public static final String RIAK_PORT = "riak.port";
    public static final String BUCKET_NAME = "dogparkmeetups";
    public static final String KEY_CITIES = "cities";
    public static final String VIEW_404_NOT_FOUND = "views/system/404notFound.ftl.html";

    // Guice injected fields
    private final Logger logger;
    private final NinjaProperties properties;
    private final Provider<Mail> mailProvider;
    private final Postoffice postoffice;

    // Initialized in the @Start hook, torn down in the @Dispose hook.
    private IRiakClient riakClient;
    private Bucket bucket;

    @Inject
    public DogparkController(
            Logger logger,
            NinjaProperties properties,
            Provider<Mail> mailProvider,
            Postoffice postoffice) {
        this.logger = logger;
        this.properties = properties;
        this.mailProvider = mailProvider;
        this.postoffice = postoffice;
    }

    /** Connects to Riak (host/port from config, with local defaults) and opens the bucket. */
    @Start(order = 10)
    public void initRiakClientAndBucket() throws RiakException {
        String host = properties.getWithDefault(RIAK_HOST, "127.0.0.1");
        int port = properties.getIntegerWithDefault(RIAK_PORT, 8087);
        logger.info("Initializing RiakClient on " + host + ":" + port + "...");
        riakClient = RiakFactory.pbcClient(host, port);
        bucket = riakClient.fetchBucket(BUCKET_NAME).execute();
    }

    /** Shuts down the Riak connection on application stop. */
    @Dispose(order = 10)
    public void shutdownRiakClient() {
        if (riakClient != null) {
            logger.info("Shutting down RiakClient...");
            riakClient.shutdown();
        }
    }

    /** JSON list of every dogpark in every city. */
    public Result allDogparks() throws RiakRetryFailedException {
        CityList cities = bucket.fetch(KEY_CITIES, CityList.class).execute();
        List<Dogpark> dogparks = cities.stream()
                .flatMap(city -> city.dogparks.stream())
                .collect(Collectors.toList());
        return Results.json().render(dogparks);
    }

    /** HTML page listing all cities and their dogparks. */
    public Result dogparkListPage() throws RiakRetryFailedException {
        CityList cities = bucket.fetch(KEY_CITIES, CityList.class).execute();
        return Results.html().render("cities", cities);
    }

    /** HTML detail page for one dogpark, or a 404 page when unknown. */
    public Result dogparkPage(@PathParam("id") String dogparkId) throws RiakRetryFailedException {
        if (dogparkId == null) {
            return Results.notFound().html().template(VIEW_404_NOT_FOUND);
        }
        Optional<Dogpark> dogpark = getDogparkFromDb(dogparkId);
        if (dogpark.isPresent()) {
            return Results.html().render("dogpark", dogpark.get());
        } else {
            return Results.notFound().html().template(VIEW_404_NOT_FOUND);
        }
    }

    /**
     * JSON list of signups for a park, windowed from one month before to six
     * months after the first day of {@code yearMonth}. Cancellation codes are
     * blanked before rendering so they never leak to clients.
     */
    public Result dogparkSignups(
            @PathParam("id") String dogparkId,
            @Param("yearMonth") String yearMonth) throws RiakRetryFailedException {
        if (dogparkId == null) {
            return Results.notFound().json();
        }
        Instant timeLower = YearMonth.parse(yearMonth)
                .atDay(1)
                .minusMonths(1)
                .atStartOfDay()
                .toInstant(ZoneOffset.UTC);
        Instant timeUpper = YearMonth.parse(yearMonth)
                .atDay(1)
                .plusMonths(6)
                .atStartOfDay()
                .toInstant(ZoneOffset.UTC);
        DogparkSignupList allSignups = bucket.fetch(dogparkId, DogparkSignupList.class).execute();
        if (allSignups != null) { // Might very well be empty
            List<DogparkSignup> signups = allSignups.stream()
                    .filter(signup -> {
                        Instant arrival = signup.arrivalTime.toInstant();
                        return arrival.isAfter(timeLower) && arrival.isBefore(timeUpper);
                    })
                    .collect(Collectors.toList());
            // Blank the secret codes after collecting (was a side-effecting
            // peek() in the middle of the stream pipeline).
            signups.forEach(signup -> signup.cancellationCode = null);
            return Results.json().render(signups);
        } else {
            return Results.json().render(Collections.emptyList());
        }
    }

    /** HTML signup form for a park on a given date (date validated by parsing). */
    public Result signupPage(
            @PathParam("id") String dogparkId,
            @Param("date") String date) throws RiakRetryFailedException {
        // Parse to validate; throws DateTimeParseException on a bad date.
        // (Removed the unused local the parse result was assigned to.)
        LocalDate.parse(date.trim());
        Optional<Dogpark> dogpark = getDogparkFromDb(dogparkId);
        if (dogpark.isPresent()) {
            return Results.html().render("dogpark", dogpark.get()).render("date", date);
        }
        return Results.notFound().html().template(VIEW_404_NOT_FOUND);
    }

    /**
     * Creates a signup: HTML-escapes user input, generates a cancellation
     * code, parses the arrival time, stores the signup, optionally emails the
     * cancellation code, and redirects back to the park page.
     */
    public Result doSignupPost(
            FlashScope flashScope,
            @PathParam("id") String dogparkId,
            @Param("date") String date,
            @Param("timeOfArrival") String timeOfArrival,
            @Param("email") String email,
            DogparkSignup newSignup) throws RiakRetryFailedException {
        Optional<Dogpark> dogpark = getDogparkFromDb(dogparkId);
        if (!dogpark.isPresent()) {
            return Results.notFound().html().template(VIEW_404_NOT_FOUND);
        }

        // Escape against HTML
        Escaper htmlEscaper = HtmlEscapers.htmlEscaper();
        newSignup.dogBreed = htmlEscaper.escape(newSignup.dogBreed);
        newSignup.dogName = htmlEscaper.escape(newSignup.dogName);
        newSignup.dogWeightClass = htmlEscaper.escape(newSignup.dogWeightClass);

        // Generate UUID cancellation code
        newSignup.generateCancellationCode();

        // Parse and set arrival time. Accepts '.' as the hour/minute separator.
        timeOfArrival = timeOfArrival.trim().replace('.', ':');
        LocalDateTime arrivalTimestamp = LocalDateTime.parse(date + " " + timeOfArrival,
                DateTimeFormatter.ofPattern("yyyy-MM-dd H:m"));
        ZoneId zoneId = ZoneId.of("Europe/Helsinki");
        // NOTE(review): uses the CURRENT Helsinki offset, not the offset at the
        // arrival date — off by an hour across a DST boundary; confirm intent.
        newSignup.arrivalTime =
                Date.from(arrivalTimestamp.toInstant(ZonedDateTime.now(zoneId).getOffset()));

        // Add to signups list
        DogparkSignupList signups = bucket.fetch(dogparkId, DogparkSignupList.class).execute();
        if (signups == null) {
            signups = new DogparkSignupList();
        }
        signups.add(newSignup);
        bucket.store(dogparkId, signups).execute();

        // If wanted, send cancellation code via email. Email failure is
        // logged but does not fail the signup.
        if (!Strings.isNullOrEmpty(email)) {
            try {
                sendCancellationCodeViaEmail(email, dogpark.get(), newSignup);
            } catch (Exception ex) {
                logger.error(ex.toString());
            }
        }
        flashScope.put("signupSuccessfulMsg", getCancellationCodeMessage(dogpark.get(), newSignup));
        return Results.redirect("/dogparks/" + dogparkId);
    }

    /**
     * Cancels a signup identified by a public cancellation code of the form
     * base64("&lt;dogparkId&gt;:&lt;cancellationCode&gt;"). Flashes a success
     * or error key and redirects to the cancel page.
     */
    public Result doCancelSignupPost(
            FlashScope flashScope,
            @Param("cancellationCode") String publicCancellationCode) throws RiakRetryFailedException {
        boolean cancelSuccessful = false;

        String decoded;
        try {
            decoded = new String(
                    Base64.getDecoder().decode(publicCancellationCode),
                    StandardCharsets.ISO_8859_1
            );
        } catch (IllegalArgumentException iaex) {
            decoded = ""; // not valid base64 — treated as an invalid code below
        }

        String[] parts = decoded.split(":");
        if (parts.length >= 2) {
            String dogparkId = parts[0];
            String cancellationCode = parts[1];
            DogparkSignupList signups = bucket.fetch(dogparkId, DogparkSignupList.class).execute();
            if (signups != null) {
                Optional<DogparkSignup> cancellableSignup = signups.stream()
                        .filter(signup -> signup.cancellationCode.equals(cancellationCode))
                        .findFirst();
                if (cancellableSignup.isPresent()) {
                    signups.remove(cancellableSignup.get());
                    bucket.store(dogparkId, signups).execute();
                    cancelSuccessful = true;
                    // Fix: guard the Optional instead of calling get() blindly —
                    // the park may have been removed from the city list meanwhile.
                    getDogparkFromDb(dogparkId).ifPresent(dogpark ->
                            flashScope.put("cancellationMsg",
                                    getCancellationSuccessMsg(dogpark, cancellableSignup.get())));
                }
            }
        }
        if (cancelSuccessful) {
            flashScope.success("cancel.success");
        } else {
            flashScope.error("cancel.invalidCancellationCode");
        }
        return Results.redirect("/cancel");
    }

    /** Looks up a dogpark by id (case-insensitive) in the cities catalogue. */
    private Optional<Dogpark> getDogparkFromDb(String dogparkId) throws RiakRetryFailedException {
        Optional<Dogpark> dogpark = bucket.fetch(KEY_CITIES, CityList.class).execute().stream()
                .flatMap(city -> city.dogparks.stream())
                .filter(park -> dogparkId.equalsIgnoreCase(park.id))
                .findFirst();
        return dogpark;
    }

    /** Sends the cancellation-code message to the given address. */
    private void sendCancellationCodeViaEmail(String email, Dogpark dogpark, DogparkSignup signup)
            throws Exception {
        Mail mail = mailProvider.get();
        mail.setSubject("Peruutuskoodi");
        mail.addTo(email);
        mail.setFrom("koirapuistomiitit@gmail.com");
        mail.setCharset("utf-8");
        mail.setBodyText(getCancellationCodeMessage(dogpark, signup));
        postoffice.send(mail);
    }

    /** Builds base64("&lt;dogparkId&gt;:&lt;cancellationCode&gt;"), the public code. */
    private static String formPublicCancellationCode(String dogparkId, DogparkSignup signup) {
        String rawCode = dogparkId + ":" + signup.cancellationCode;
        String result = Base64.getEncoder().encodeToString(rawCode.getBytes(StandardCharsets.ISO_8859_1));
        return result;
    }

    /** Finnish flash/email message containing the public cancellation code. */
    private static String getCancellationCodeMessage(Dogpark dogpark, DogparkSignup signup) {
        String publicCancellationCode = formPublicCancellationCode(dogpark.id, signup);
        return "Peruutuskoodisi koirapuiston \"" + dogpark.name
                + "\" ilmoittautumiseen \"" + signup.toString() + "\" on: " + publicCancellationCode;
    }

    /** Finnish flash message confirming a cancellation. */
    private static String getCancellationSuccessMsg(Dogpark dogpark, DogparkSignup signup) {
        // Fix: removed an unused formPublicCancellationCode(...) computation
        // whose result was never referenced.
        return "Peruutettu koirapuiston \"" + dogpark.name
                + "\" ilmoittautuminen \"" + signup.toString() + "\".";
    }
}
package ameba.http.session;

import ameba.cache.Cache;

import java.util.Map;

/**
 * Session implementation backed by the application-wide {@link Cache}.
 * Session state lives in a lazily fetched {@link SessionStore}; changes are
 * written back to the cache only when {@link #flush()} is called.
 *
 * @author icode
 */
public class CacheSession extends AbstractSession {

    // Namespace prefix so session ids cannot collide with other cache keys.
    private static final String SESSION_PRE_KEY = CacheSession.class.getName() + ".__SESSION__.";

    // Local copy of the session state; null until fetched from the cache.
    private SessionStore store;
    // True once a cache fetch has been attempted (even if it returned null).
    private boolean fetched = false;
    // Set by invalidate(); suppresses any further write-back in flush().
    private boolean isDelete = false;
    // Set by touch(); flush() then only refreshes the cache entry's TTL.
    private boolean isTouch = false;

    protected CacheSession(String id, String host, long defaultTimeout, boolean isNew) {
        super(id, host, defaultTimeout, isNew);
    }

    /** Loads a session straight from the cache; null when absent or expired. */
    public static AbstractSession get(String id) {
        return Cache.get(getKey(id));
    }

    private static String getKey(String id) {
        return SESSION_PRE_KEY + id;
    }

    @Override
    public void setAttribute(Object key, Object value) {
        getStore().getAttributes().put(key, value);
    }

    @Override
    @SuppressWarnings("unchecked")
    public <V> V getAttribute(Object key) {
        return (V) getStore().getAttributes().get(key);
    }

    @Override
    public Map<Object, Object> getAttributes() {
        return getStore().getAttributes();
    }

    @Override
    @SuppressWarnings("unchecked")
    public <V> V removeAttribute(Object key) {
        return (V) getStore().getAttributes().remove(key);
    }

    @Override
    public long getTimeout() {
        return getStore().getTimeout();
    }

    @Override
    public void setTimeout(long maxIdleTimeInMillis) {
        getStore().setTimeout(maxIdleTimeInMillis);
    }

    @Override
    public void invalidate() {
        // Delete synchronously from the cache, then block future flushes.
        Cache.syncDelete(getKey());
        isDelete = true;
    }

    @Override
    public boolean isInvalid() {
        if (isNew) {
            return false;
        }
        if (store == null) {
            refresh(false);
        }
        // Invalid when the backing store no longer exists in the cache.
        return store == null;
    }

    @Override
    public long getTimestamp() {
        // NOTE(review): this returns the store's TIMEOUT, not a creation or
        // last-access timestamp — looks like a copy-paste slip; confirm
        // against the AbstractSession contract before relying on it.
        return getStore().getTimeout();
    }

    @Override
    public void flush() {
        if (!isDelete) {
            if (store == null) getStore();
            if (store != null) {
                if (store.isChange()) {
                    // Dirty store: clear the dirty flag and write it all back.
                    store.unchange();
                    Cache.syncSet(getKey(), store, (int) store.getTimeout());
                } else if (isTouch) {
                    // Clean but touched: only extend the cache entry's TTL.
                    isTouch = false;
                    Cache.touch(getKey(), (int) getStore().getTimeout());
                }
            }
        }
    }

    @Override
    public void refresh() {
        refresh(true);
    }

    @Override
    public void touch() {
        isTouch = true;
    }

    /**
     * Re-reads the store from the cache. When {@code force} is false this is
     * a no-op if a fetch has already happened.
     */
    public void refresh(boolean force) {
        if (!fetched || force) {
            store = Cache.get(getKey());
            fetched = true;
        }
    }

    private SessionStore getStore() {
        // Lazy init: try the cache first, then fall back to a fresh store.
        // NOTE(review): double-checked locking on a non-volatile field —
        // cross-thread visibility is not guaranteed; confirm sessions are
        // confined to one thread at a time.
        if (store == null) {
            synchronized (this) {
                if (store == null) {
                    refresh(false);
                }
                if (store == null) {
                    store = new SessionStore(defaultTimeout);
                }
            }
        }
        return store;
    }

    private String getKey() {
        return getKey(getId());
    }

    @Override
    protected void setId(String id) {
        // A new id means this object now represents a different session:
        // reset the fetch/delete/touch state so stale data is not reused.
        if (id != null && !id.equals(getId())) {
            super.setId(id);
            fetched = false;
            isDelete = false;
            isTouch = false;
        }
    }
}
package com.buildndeploy.piglatin; public final class WordUtil { private static final String VOWELS = "AEIOUYaeiouy"; private WordUtil() {}; public static String capitalize(String word) { return Character.toUpperCase(word.charAt(0)) + word.substring(1); } public static boolean isCapitalized(String word) { return Character.isUpperCase(word.charAt(0)); } static boolean startsWithVowel(String word) { return isVowel(word.charAt(0)); } static boolean isVowel(Character letter) { return WordUtil.VOWELS.indexOf(letter) > -1; } static int getIndexOfFirstVowel(String word) { int wordLength = word.length(); int index = -1; for (int i = 0; i < wordLength; i++) { if (isVowel(word.charAt(i))) { index = i; break; } } return index; } }
package controllers;

import static services.UserService.getUserByStudentNumber;
import static spark.Spark.*;

import controllers.utils.Request;
import controllers.utils.Response;
import services.UserService.*;

import static services.SessionService.*;

/**
 * Spark routes for the /sessions resource: fetch a session by id and create
 * a new session for the authenticated user. Exception mappers translate
 * service errors into HTTP status responses.
 */
public class SessionController {

    /** Registers all /sessions routes and exception mappers with Spark. */
    public static void init() {
        path("/sessions", () -> {
            /*
             * Get one session by its numeric id.
             */
            get("/:id", (req, res) -> {
                int id = Integer.parseInt(req.params(":id"));
                return Response.ok(res, getSessionById(id));
            });

            // Non-numeric :id -> 400; unknown session -> 404.
            exception(NumberFormatException.class, (exception, request, response) ->
                Response.badRequest(response)
            );
            exception(SessionNotFound.class, (exception, request, response) ->
                Response.notFound(response)
            );

            /*
             * Create new session
             */
            post("", (req, res) -> {
                Session session = Request.getBodyAs(req.body(), Session.class);
                // Owner comes from the auth token, never from the body.
                String studentNumber = Request.getAuthIdentifier(req);
                User user = getUserByStudentNumber(studentNumber);
                session.user = user.id;
                Session createdSession = createSession(session);
                return Response.created(res, createdSession);
            });

            // Persistence failure while creating -> 500.
            exception(SessionNotCreated.class, (exception, request, response) ->
                Response.internalServerError(response)
            );
        });
    }
}
package main.java.author.view.tabs.terrain;

import javax.swing.*;

import main.java.author.view.tabs.terrain.types.TileObject;

import java.awt.*;
import java.awt.event.InputEvent;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.awt.event.MouseMotionAdapter;
import java.awt.geom.Line2D;
import java.awt.image.BufferedImage;
import java.util.*;
import java.util.List;

/**
 * A fixed-size grid of {@link Tile}s that the author paints by clicking or
 * dragging; the currently selected {@link TileObject} supplies the image,
 * background color and passability applied to each touched tile.
 */
public class Canvas extends JPanel {

    public static final Color DEFAULT_TILE_COLOR = Color.LIGHT_GRAY;
    public static final Color DEFAULT_BORDER_COLOR = Color.BLACK;
    public static final int NUM_ROWS = 10;
    public static final int NUM_COLS = 15;
    public static final int TILE_SIZE = 50; // in pixels

    private final Tile[][] myTiles;
    private TileObject selectedTileObj;

    public Canvas() {
        myTiles = new Tile[NUM_ROWS][NUM_COLS];
        for (int row = 0; row < NUM_ROWS; row++) {
            for (int col = 0; col < NUM_COLS; col++) {
                myTiles[row][col] = new Tile(row, col, DEFAULT_TILE_COLOR);
            }
        }
        // important for maintaining size of JPanel
        setPreferredSize(new Dimension(NUM_COLS * TILE_SIZE, NUM_ROWS * TILE_SIZE));
        // Paint a tile on click ...
        addMouseListener(new MouseAdapter() {
            @Override
            public void mousePressed(MouseEvent e) {
                updateTile(e);
            }
        });
        // ... and while dragging.
        addMouseMotionListener(new MouseMotionAdapter() {
            @Override
            public void mouseDragged(MouseEvent e) {
                updateTile(e);
            }
        });
    }

    /**
     * Paints every tile: its image if one is set, otherwise its background
     * color, plus a border. Tile size is derived from the current component
     * size so the grid scales with the panel.
     */
    @Override
    public void paintComponent(Graphics g) {
        super.paintComponent(g); // Call to super class is necessary
        g.clearRect(0, 0, getWidth(), getHeight());
        int rectWidth = getWidth() / NUM_COLS;
        int rectHeight = getHeight() / NUM_ROWS;
        // Fix: removed an unused local counter ('index') that was never read.
        for (Tile tile : getTiles()) {
            // Upper left corner of the tile
            int x = tile.getCol() * rectWidth;
            int y = tile.getRow() * rectHeight;
            Color tileColor = tile.getColor();
            Image tileImage = tile.getImage();
            if (tileImage == null) {
                g.setColor(tileColor);
                g.fillRect(x, y, rectWidth, rectHeight); // filling appropriate Tile background colors
            } else {
                g.drawImage(tileImage, x, y, rectWidth, rectHeight, tileColor, null);
            }
            g.setColor(DEFAULT_BORDER_COLOR);
            g.drawRect(x, y, rectWidth, rectHeight); // drawing appropriate Tile borders
        }
    }

    /**
     * Obtains the specified Tile within the JPanel.
     *
     * @param x the x-coordinate of the JPanel
     * @param y the y-coordinate of the JPanel
     * @return the Tile at that location, or null when (x, y) falls outside
     *         the grid (mapping uses the fixed TILE_SIZE, not the scaled size)
     */
    private Tile getTile(int x, int y) {
        int row = y / TILE_SIZE;
        int col = x / TILE_SIZE;
        boolean validRow = row >= 0 && row <= NUM_ROWS - 1;
        boolean validCol = col >= 0 && col <= NUM_COLS - 1;
        if (validRow && validCol) {
            return myTiles[row][col];
        }
        return null;
    }

    /**
     * Obtains a list of all tiles within the JPanel, in row-major order.
     *
     * @return all tiles within the JPanel
     */
    private List<Tile> getTiles() {
        // Presized: the total tile count is known up front.
        List<Tile> tiles = new ArrayList<Tile>(NUM_ROWS * NUM_COLS);
        for (int i = 0; i < NUM_ROWS; i++) {
            for (int j = 0; j < NUM_COLS; j++) {
                tiles.add(myTiles[i][j]);
            }
        }
        return tiles;
    }

    /**
     * Applies the selected tile object (or the defaults when none is
     * selected) to the tile under the mouse event, then repaints.
     */
    private void updateTile(MouseEvent e) {
        Tile tile = getTile(e.getX(), e.getY());
        if (tile == null) {
            return;
        }
        tile.setImage((selectedTileObj == null) ? null : selectedTileObj.getImage());
        tile.setColor((selectedTileObj == null) ? DEFAULT_TILE_COLOR : selectedTileObj.getBGColor());
        tile.setPassIndex((selectedTileObj == null) ? 0 : selectedTileObj.getPassabilityIndex());
        repaint();
    }

    /** Resets every tile's image and color to the defaults, then repaints once. */
    protected void clearTiles() {
        for (Tile tile : getTiles()) {
            tile.setImage(null);
            tile.setColor(DEFAULT_TILE_COLOR);
        }
        // Fix: repaint once after all tiles are reset, instead of scheduling
        // a repaint per tile inside the loop.
        // NOTE(review): passIndex is intentionally(?) not reset here — confirm.
        repaint();
    }

    /** Sets the tile object that subsequent clicks/drags will paint with. */
    public void setSelectedTileObj(TileObject tObj) {
        selectedTileObj = tObj;
    }
}
package com.coremedia.iso;

import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.io.UnsupportedEncodingException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.util.ArrayList;
import java.util.List;

/**
 * A cursor over one or more {@link ByteBuffer}s with helper methods for
 * reading ISO-BMFF (MP4) specific numbers and strings. The buffers are
 * treated as one contiguous stream; {@code activeParent} indexes the buffer
 * the cursor currently sits in (-1 means "positioned at the very end").
 */
public class IsoBufferWrapper {
    ByteBuffer[] parents;
    int activeParent = 0;
    // Bit-reader state for readBits(): how many unread bits remain in
    // readBitsBuffer, and the byte they came from.
    public int readBitsRemaining;
    private byte readBitsBuffer;

    public IsoBufferWrapper(ByteBuffer parent) {
        this.parents = new ByteBuffer[]{parent};
    }

    public IsoBufferWrapper(ByteBuffer[] parents) {
        this.parents = parents;
    }

    public IsoBufferWrapper(List<ByteBuffer> parents) {
        this.parents = parents.toArray(new ByteBuffer[parents.size()]);
    }

    /**
     * Memory-maps the file read-only in 128 MB slices. The map() call is
     * retried up to three times on IOException before giving up —
     * presumably to ride out transient mmap failures; confirm why.
     */
    public IsoBufferWrapper(File file) throws IOException {
        long filelength = file.length();
        int sliceSize = 1024 * 1024 * 128;
        RandomAccessFile raf = new RandomAccessFile(file, "r");
        ArrayList<ByteBuffer> buffers = new ArrayList<ByteBuffer>();
        long i = 0;
        while (i < filelength) {
            if ((filelength - i) > sliceSize) {
                ByteBuffer bb;
                try {
                    bb = raf.getChannel().map(FileChannel.MapMode.READ_ONLY, i, sliceSize);//.slice();
                } catch (IOException e1) {
                    try {
                        bb = raf.getChannel().map(FileChannel.MapMode.READ_ONLY, i, sliceSize);//.slice();
                    } catch (IOException e2) {
                        try {
                            bb = raf.getChannel().map(FileChannel.MapMode.READ_ONLY, i, sliceSize);//.slice();
                        } catch (IOException e3) {
                            // Last attempt propagates the IOException.
                            bb = raf.getChannel().map(FileChannel.MapMode.READ_ONLY, i, sliceSize);//.slice();
                        }
                    }
                }
                buffers.add(bb);
                i += sliceSize;
            } else {
                // Final (short) slice covers the remainder of the file.
                buffers.add(raf.getChannel().map(FileChannel.MapMode.READ_ONLY, i, filelength - i).slice());
                i += filelength - i;
            }
        }
        parents = buffers.toArray(new ByteBuffer[buffers.size()]);
        raf.close();
    }

    /** Absolute position of the cursor across all parent buffers. */
    public long position() {
        if (activeParent >= 0) {
            long pos = 0;
            for (int i = 0; i < activeParent; i++) {
                pos += parents[i].limit();
            }
            pos += parents[activeParent].position();
            return pos;
        } else {
            // activeParent == -1 is the "at end" sentinel.
            return size();
        }
    }

    /** Moves the cursor to an absolute position across all parent buffers. */
    public void position(long position) {
        if (position == size()) {
            activeParent = -1;
        } else {
            int current = 0;
            while (position >= parents[current].limit()) {
                position -= parents[current++].limit();
            }
            parents[current].position((int) position);
            activeParent = current;
        }
    }

    /** Total size in bytes (sum of all parent buffer limits). */
    public long size() {
        long size = 0;
        for (ByteBuffer parent : parents) {
            size += parent.limit();
        }
        return size;
    }

    /**
     * Reads a big-endian unsigned 64-bit value. Values that do not fit in a
     * positive signed long are rejected.
     */
    public long readUInt64() {
        long result = 0;
        // thanks to Erik Nicolas for finding a bug! Cast to long is definitivly needed
        result += readUInt32() << 32;
        if (result < 0) {
            throw new RuntimeException("I don't know how to deal with UInt64! long is not sufficient and I don't want to use BigInt");
        }
        result += readUInt32();
        return result;
    }

    /** Reads a big-endian unsigned 32-bit value. */
    public long readUInt32() {
        long result = 0;
        result += ((long) readUInt16()) << 16;
        result += readUInt16();
        return result;
    }

    /** Reads a big-endian unsigned 24-bit value. */
    public int readUInt24() {
        int result = 0;
        result += readUInt16() << 8;
        result += readUInt8();
        return result;
    }

    /** Reads a big-endian unsigned 16-bit value. */
    public int readUInt16() {
        int result = 0;
        result += readUInt8() << 8;
        result += readUInt8();
        return result;
    }

    /** Reads one byte as an unsigned value (0..255). */
    public int readUInt8() {
        byte b = read();
        return b < 0 ? b + 256 : b;
    }

    /** Reads exactly {@code byteCount} bytes into a fresh array. */
    public byte[] read(int byteCount) {
        byte[] result = new byte[byteCount];
        this.read(result);
        return result;
    }

    /** Bytes remaining from the cursor to the end of all buffers. */
    public long remaining() {
        if (activeParent == -1) {
            return 0;
        } else {
            long remaining = 0;
            for (int i = activeParent; i < parents.length; i++) {
                remaining += parents[i].remaining();
            }
            return remaining;
        }
    }

    /**
     * Reads one byte, advancing into the next parent buffer when the current
     * one is exhausted. NOTE(review): reading past the end does not throw —
     * it prints a stack trace and returns 0, which can silently corrupt
     * parsing; confirm callers rely on this.
     */
    public byte read() {
        if (parents[activeParent].remaining() == 0) {
            if (parents.length > activeParent + 1) {
                activeParent++;
                parents[activeParent].rewind();
                return read();
            } else {
                //todo: consider a parents' rewind on init?
                try {
                    throw new RuntimeException("No more bytes to read remaining! Make sure position is set correctly after instantiation.");
                } catch (RuntimeException e) {
                    System.out.println("read beyond buffers!");
                    e.printStackTrace();
                    return 0;
                }
            }
        }
        return parents[activeParent].get();
    }

    public int read(byte[] b) {
        return read(b, 0, b.length);
    }

    /**
     * Bulk read that recurses across parent-buffer boundaries until
     * {@code len} bytes have been copied.
     */
    public int read(byte[] b, int off, int len) {
        if (parents[activeParent].remaining() >= len) {
            parents[activeParent].get(b, off, len);
            return len;
        } else {
            int curRemaining = parents[activeParent].remaining();
            parents[activeParent].get(b, off, curRemaining);
            activeParent++;
            parents[activeParent].rewind();
            return curRemaining + read(b, off + curRemaining, len - curRemaining);
        }
    }

    /** Reads a signed 16.16 fixed-point number (big-endian). */
    public double readFixedPoint1616() {
        byte[] bytes = read(4);
        int result = 0;
        result |= ((bytes[0] << 24) & 0xFF000000);
        result |= ((bytes[1] << 16) & 0xFF0000);
        result |= ((bytes[2] << 8) & 0xFF00);
        result |= ((bytes[3]) & 0xFF);
        return ((double) result) / 65536;
    }

    /** Reads a signed 8.8 fixed-point number (big-endian). */
    public float readFixedPoint88() {
        byte[] bytes = read(2);
        short result = 0;
        result |= ((bytes[0] << 8) & 0xFF00);
        result |= ((bytes[1]) & 0xFF);
        return ((float) result) / 256;
    }

    /**
     * Reads an ISO-639-2/T language code packed as three 5-bit letters in a
     * 16-bit field (each letter stored as char - 0x60).
     */
    public String readIso639() {
        int bits = readUInt16();
        StringBuilder result = new StringBuilder();
        for (int i = 0; i < 3; i++) {
            int c = (bits >> (2 - i) * 5) & 0x1f;
            result.append((char) (c + 0x60));
        }
        return result.toString();
    }

    /**
     * Reads a zero terminated string.
     *
     * @return the string read
     * @throws Error in case of an error in the underlying stream
     */
    public String readString() {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        int read;
        while ((read = read()) != 0) {
            out.write(read);
        }
        try {
            return out.toString("UTF-8");
        } catch (UnsupportedEncodingException e) {
            throw new Error("JVM doesn't support UTF-8");
        }
    }

    /** Reads exactly {@code length} bytes and decodes them as UTF-8. */
    public String readString(int length) {
        byte[] buffer = new byte[length];
        this.read(buffer);
        try {
            return new String(buffer, "UTF-8");
        } catch (UnsupportedEncodingException e) {
            throw new Error("JVM doesn't support UTF-8");
        }
    }

    /** Skips {@code n} bytes by repositioning the cursor. */
    public long skip(long n) {
        this.position(this.position() + n);
        return n;
    }

    /**
     * Returns slices covering {@code length} bytes starting at
     * {@code startPos}, crossing parent-buffer boundaries as needed.
     * Side effect: moves the cursor (via position()/activeParent).
     */
    public ByteBuffer[] getSegment(long startPos, long length) {
        ArrayList<ByteBuffer> segments = new ArrayList<ByteBuffer>();
        position(startPos);
        while (length > 0) {
            ByteBuffer currentSlice = parents[activeParent].slice();
            if (currentSlice.remaining() >= length) {
                currentSlice.limit((int) length); // thats ok we tested in the line before
                length -= length;
            } else {
                // ok use up current bytebuffer and jump to next
                length -= currentSlice.remaining();
                parents[++activeParent].rewind();
            }
            segments.add(currentSlice);
        }
        return segments.toArray(new ByteBuffer[segments.size()]);
    }

    /** Reads an unsigned 32-bit value with the 16-bit halves byte-swapped (little-endian words). */
    public long readUInt32BE() {
        long result = 0;
        result += readUInt16BE();
        result += ((long) readUInt16BE()) << 16;
        return result;
    }

    /** Reads an unsigned 16-bit value, low byte first (little-endian). */
    public int readUInt16BE() {
        int result = 0;
        result += readUInt8();
        result += readUInt8() << 8;
        return result;
    }

    /**
     * Reads up to 31 bits (MSB first) using the internal bit buffer.
     *
     * @throws IllegalArgumentException when more than 31 bits are requested
     */
    public int readBits(int i) {
        if (i > 31) {
            //> signed int
            throw new IllegalArgumentException("cannot read more than 31 bits");
        }
        int ret = 0;
        while (i > 8) {
            final int moved = parse8(8) << i - 8;
            ret = ret | moved;
            i -= 8;
        }
        return ret | parse8(i);
    }

    /**
     * Reads up to 8 bits from the bit buffer, refilling it from the byte
     * stream as needed; handles requests that straddle a byte boundary.
     */
    private int parse8(int i) {
        if (readBitsRemaining == 0) {
            readBitsBuffer = read();
            readBitsRemaining = 8;
        }
        if (i > readBitsRemaining) {
            // Take what is left of the current byte, then the rest from the next.
            final int resultRemaining = i - readBitsRemaining;
            int buffer = (readBitsBuffer & (int) (Math.pow(2, readBitsRemaining) - 1)) << resultRemaining;
            readBitsBuffer = read();
            readBitsRemaining = 8 - resultRemaining;
            final int movedAndMasked = (readBitsBuffer >>> readBitsRemaining) & (int) (Math.pow(2, resultRemaining) - 1);
            return buffer | movedAndMasked;
        } else {
            readBitsRemaining -= i;
            return (readBitsBuffer >>> readBitsRemaining) & (int) (Math.pow(2, i) - 1);
        }
    }

    public int getReadBitsRemaining() {
        return readBitsRemaining;
    }
}
package csci432.camera;

import javax.imageio.ImageIO;

import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;

import static java.lang.Runtime.getRuntime;

/**
 * {@link Camera} implementation that shells out to the Raspberry Pi
 * {@code raspistill} tool. Pictures are written to {@code saveLocation}
 * with an incrementing index; {@link #getUnfilteredImage()} reads them back.
 */
public class RaspberryPiCam implements Camera {

    // Index of the next picture to take / read. Fix: was a boxed Integer.
    private int picNumber;
    private final String saveLocation;

    public RaspberryPiCam(String saveLocation) {
        this.saveLocation = saveLocation;
        this.picNumber = 0;
    }

    /** Takes one still synchronously, then advances the picture counter. */
    @Override
    public void takePicture() {
        try {
            // NOTE(review): Runtime.exec(String) tokenizes on whitespace; a
            // saveLocation containing spaces breaks the command — confirm.
            Process p = Runtime.getRuntime().exec("raspistill -o " + saveLocation + picNumber + ".jpg --nopreview --timeout 1");
            p.waitFor();
        } catch (IOException ex) {
            ex.printStackTrace();
        } catch (InterruptedException ix) {
            // Fix: re-assert the interrupt flag so callers can observe it.
            Thread.currentThread().interrupt();
            ix.printStackTrace();
        }
        picNumber++;
    }

    /** Starts a raspistill timelapse (fire-and-forget; no waitFor). */
    @Override
    public void takePictureOnInterval(Long pause, Long duration) {
        try {
            getRuntime().exec("raspistill -w 500 -h 500 -tl " + pause + " -t " + duration + " -o " + saveLocation + "original_%03d.jpg --nopreview");
        } catch (IOException ex) {
            ex.printStackTrace();
        }
    }

    /**
     * Gets an unfiltered image to filter. Probes up to 5 subsequent indices
     * when the expected file does not exist yet.
     *
     * @return an unfiltered image, or null when none could be read yet
     */
    public BufferedImage getUnfilteredImage() {
        String location = String.format(saveLocation + "original_%1$03d.jpg", picNumber);
        BufferedImage image = null;
        try {
            File file = new File(location);
            for (int i = picNumber + 1; i < 5 && !file.exists(); i++) {
                location = String.format(saveLocation + "original_%1$03d.jpg", i);
                file = new File(location);
            }
            image = ImageIO.read(new File(location));
            picNumber++;
        } catch (IOException e) {
            System.out.println(location + " and 5 following images not taking yet, waiting...");
        }
        // Fix: was 'finally { return image; }' — a return inside finally
        // swallows any in-flight exception; plain return keeps behavior
        // (null on failure) without masking errors.
        return image;
    }
}
package com.davidsoergel.stats;

import org.apache.log4j.Logger;

import java.util.Set;

/**
 * Base class for one-dimensional histograms with a fixed number of bins.
 * Subclasses define the binning scheme via {@link #bin(double)},
 * {@link #topOfBin(int)} and {@link #bottomOfBin(int)}.
 *
 * @author David Soergel
 * @version 1.0
 */
public abstract class Histogram1D extends SimpleXYSeries {
    private static Logger logger = Logger.getLogger(Histogram1D.class);

    // validcounts: samples that landed in a bin; totalcounts: all samples seen
    // (including out-of-range ones).
    int validcounts, totalcounts;

    protected int[] counts;
    protected int bins;
    //protected int underflow, overflow;
    protected double from, to;

    private double totalsum = 0;// handy to keep the sum around to get the mean quickly

    /**
     * @param from lower edge of the histogram range
     * @param to   upper edge of the histogram range
     * @param bins number of bins
     */
    public Histogram1D(double from, double to, int bins) {
        this.from = from;
        this.to = to;
        this.bins = bins;
        counts = new int[bins];
    }

    public int[] getCounts() {
        return counts;
    }

    /** Adds every x value of the series to the histogram. */
    public void addXValues(SimpleXYSeries s) {
        for (double i : s.getXArray()) {
            add(i);
        }
    }

    /**
     * Adds one sample. Out-of-range samples are excluded from the bin counts
     * but still contribute to the running sum and total used by mean().
     */
    public void add(double x) {
        try {
            counts[bin(x)]++;
            validcounts++;
        } catch (StatsException e) {
            // out of range
        }
        totalsum += x;
        totalcounts++;
    }

    /**
     * Maps a value to its bin index.
     *
     * @throws StatsException when x falls outside the histogram range
     */
    public abstract int bin(double x) throws StatsException;

    public void addXValues(Set<SimpleXYSeries> ss) {
        for (SimpleXYSeries s : ss) {
            addXValues(s);
        }
    }

    /** Adds every y value of the series to the histogram. */
    public void addYValues(SimpleXYSeries s) {
        for (double i : s.getYArray()) {
            add(i);
        }
    }

    public void addYValues(Set<SimpleXYSeries> ss) {
        for (SimpleXYSeries s : ss) {
            addYValues(s);
        }
    }

    /**
     * Standard deviation estimated from the binned counts, treating every
     * sample in a bin as sitting at the bin center (hence "approximate").
     * Normalized by validcounts (population variance over in-range samples).
     */
    public double approximateStdDev() {
        double mean = mean();
        double var = 0;
        for (int i = 0; i < counts.length; i++) {
            double d = 0;
            try {
                d = centerOfBin(i) - mean;
            } catch (StatsException e) {
                // Every index in [0, bins) is a valid bin by construction.
                logger.debug(e);
                e.printStackTrace();
                throw new Error("Impossible");
            }
            var += counts[i] * d * d;
        }
        var /= validcounts;
        return Math.sqrt(var);
    }

    //private double sum = 0;
    /*	public double approximateMean()
         {
         //int i = 0;
         if(mean == 0)
             {
             for (int i = 0; i < counts.length; i++)
                 {
                 mean += counts[i] * centerOfBin(i);
                 }
             mean /= validcounts;
             }
         return mean;
         }
 */

    /**
     * The mean of all the numbers that have been added to this histogram,
     * whether or not they were in range.
     * NOTE(review): returns NaN (0.0/0) before any sample has been added.
     *
     * @return the mean over all added samples
     */
    public double mean() {
        return totalsum / totalcounts;
    }

    /** Midpoint of bin {@code i}, halfway between its bottom and top edges. */
    public double centerOfBin(int i) throws StatsException {
        return (topOfBin(i) + bottomOfBin(i)) / 2;
    }

    /** Upper edge of the given bin. */
    public abstract double topOfBin(int bin) throws StatsException;

    /** Lower edge of the given bin. */
    public abstract double bottomOfBin(int bin) throws StatsException;

    /** Midpoints of all bins, in index order. */
    public double[] getBinCenters() throws StatsException {
        double[] result = new double[counts.length];
        for (int i = 0; i < counts.length; i++) {
            result[i] = centerOfBin(i);
        }
        return result;
    }
}
package de.bmoth.app; import javafx.scene.Parent; import javafx.scene.Scene; import javafx.scene.control.Alert; import javafx.scene.control.TextField; import javafx.stage.Stage; public class OptionController { public static final String NONNUMERICWARNING="Not Numeric or out of Range: "; public TextField minInt; public TextField maxInt; public TextField maxInitState; public TextField maxTrans; Stage stage; public Stage getStage(Parent root) { if (stage != null) return stage; Scene scene = new Scene(root); this.stage = new Stage(); stage.setScene(scene); setupStage(); return stage; } private void setupStage() { stage.setTitle("Options"); minInt.setText(String.valueOf(PersonalPreferences.getIntPreference(PersonalPreferences.IntPreference.MIN_INT))); maxInt.setText(String.valueOf(PersonalPreferences.getIntPreference(PersonalPreferences.IntPreference.MAX_INT))); maxInitState.setText(String.valueOf(PersonalPreferences.getIntPreference(PersonalPreferences.IntPreference.MAX_INITIAL_STATE))); maxTrans.setText(String.valueOf(PersonalPreferences.getIntPreference(PersonalPreferences.IntPreference.MAX_TRANSITIONS))); // selects end of first Textfield for Caret minInt.requestFocus(); minInt.selectRange(99, 99); } private boolean checkPrefs() { if (!isNumeric(maxInt.getText())) { new Alert(Alert.AlertType.ERROR, NONNUMERICWARNING + minInt.getId()).show(); return false; } if (!isNumeric(maxInt.getText())) { new Alert(Alert.AlertType.ERROR, NONNUMERICWARNING + maxInt.getId()).show(); return false; } if (!isNumeric(maxInitState.getText())) { new Alert(Alert.AlertType.ERROR, NONNUMERICWARNING + maxInitState.getId()).show(); return false; } if (!isNumeric(maxTrans.getText())) { new Alert(Alert.AlertType.ERROR, NONNUMERICWARNING+ maxTrans.getId()).show(); return false; } if (Integer.parseInt(minInt.getText()) > Integer.parseInt(maxInt.getText())) { new Alert(Alert.AlertType.ERROR, "MIN_INT bigger than MAX_INT").show(); return false; } if(Integer.parseInt(maxInitState.getText())<1) { 
new Alert(Alert.AlertType.ERROR, "InitialStates needs to be bigger than 0").show(); return false; } if(Integer.parseInt(maxTrans.getText())<1){ new Alert(Alert.AlertType.ERROR, "Maximum transitions needs to be bigger than 0").show(); return false; } return true; } private void savePrefs() { PersonalPreferences.setIntPreference(PersonalPreferences.IntPreference.MIN_INT, minInt.getText()); PersonalPreferences.setIntPreference(PersonalPreferences.IntPreference.MAX_INT, maxInt.getText()); PersonalPreferences.setIntPreference(PersonalPreferences.IntPreference.MAX_INITIAL_STATE, maxInitState.getText()); PersonalPreferences.setIntPreference(PersonalPreferences.IntPreference.MAX_TRANSITIONS, maxTrans.getText()); } public void handleApply() { if (checkPrefs()) savePrefs(); } public void handleClose() { stage.close(); } public void handleOk() { if (checkPrefs()) { savePrefs(); stage.close(); } } private boolean isNumeric(String s) { try { int x =Integer.parseInt(s); if(x==0); //tricking SonaqQube return true; } catch (NumberFormatException e) { return false; } } }
package main.java.com.YeAJG.game.utils; import java.util.Random; /** * * @author Richard */ public class Randomizer { private static Random r = new Random(); public static float getValue(float a, float b) { if(b > 0) return (r.nextInt((int) b)) + a; else return a; } }
package com.dua3.meja.db; import com.dua3.meja.model.Row; import com.dua3.meja.model.Sheet; import java.sql.Clob; import java.sql.ResultSet; import java.sql.ResultSetMetaData; import java.sql.SQLException; import java.util.logging.Level; import java.util.logging.Logger; public class DbMejaUtil { /** Logger */ protected static final Logger LOG = Logger.getLogger(DbMejaUtil.class.getSimpleName()); private static final String ERROR_TEXT = " /** * Fill Sheet with data from {@link ResultSet}. * * The resuult set data will be appended at the bottom of the sheet. * @param sheet * the sheet to fill * @param rs * the ResultSet * @param addTableHeader * whether to generate a header row * @return * the number of rows read * @throws SQLException * if an error occurs while reading from the ResultSet. */ public static int fill(Sheet sheet, ResultSet rs, boolean addTableHeader) throws SQLException { LOG.fine("populating Sheet with ResultSet data"); // read result metadata LOG.finer("reading result meta data ..."); ResultSetMetaData meta = rs.getMetaData(); int nColumns = meta.getColumnCount(); // create table header if (addTableHeader) { LOG.finer("creating table header ..."); Row header = sheet.createRow(); for (int i = 1; i <= nColumns; i++) { String label = meta.getColumnLabel(i); header.createCell().set(label); } } // read result LOG.finer("reading result data ..."); int k = 0; while (rs.next()) { Row row = sheet.createRow(); for (int i = 1; i <= nColumns; i++) { row.createCell().set(getObject(rs, i)); } k++; } final int n = k; LOG.finer(() -> "read "+n+" rows of data"); return n; } private static Object getObject(ResultSet rs, int i) throws SQLException { Object obj = rs.getObject(i); if (obj instanceof Clob) { obj = toString((Clob) obj); } return obj; } private static String toString(Clob clob) { try { return clob.getSubString(1, (int) Math.min(Integer.MAX_VALUE, clob.length())); } catch (SQLException e) { LOG.log(Level.WARNING, "could no convert Clob to String", e); return 
ERROR_TEXT; } } private DbMejaUtil() { // utility class } }
package de.fau.osr.util; import java.io.FileInputStream; import java.io.IOException; import java.util.Properties; /** * @author Gayathery * @desc class to query property pages * */ public class AppProperties { /** * @param key * @desc This method returns the values of properties represented by the param * @author Gayathery */ public static String GetValue(String key) { Properties properties = new Properties(); try { properties.load(AppProperties.class.getResourceAsStream("/prop.properties")); return properties.getProperty(key); } catch (IOException e) { e.printStackTrace(); } return null; } /** * @param key * @desc This method returns the values of properties represented by the param as an integer * @author Gayathery */ public static Integer GetValueAsInt(String key) { Properties properties = new Properties(); try { properties.load(AppProperties.class.getResourceAsStream("/prop.properties")); return Integer.parseInt(properties.getProperty(key)); } catch (IOException e) { e.printStackTrace(); } return null; } }
package com.akiban.ais.model; import java.net.URL; import java.util.ArrayList; import java.util.Collection; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Properties; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.akiban.ais.model.Join.GroupingUsage; import com.akiban.ais.model.Join.SourceType; // AISBuilder can be used to create an AIS. The API is designed to sify the creation of an AIS during a scan // of a dump. The user need not search the AIS and hold on to AIS objects (UserTable, Column, etc.). Instead, // only names from the dump need be supplied. public class AISBuilder { private static final Logger LOG = LoggerFactory.getLogger(AISBuilder.class); // API for creating capturing basic schema information public AISBuilder() { this(new AkibanInformationSchema(), new DefaultNameGenerator()); } public AISBuilder(NameGenerator nameGenerator) { this(new AkibanInformationSchema(), nameGenerator); } public AISBuilder(AkibanInformationSchema ais) { this(ais, new DefaultNameGenerator()); } public AISBuilder(AkibanInformationSchema ais, NameGenerator nameGenerator) { LOG.trace("creating builder"); this.ais = ais; this.nameGenerator = nameGenerator; if (ais != null) { for (UserTable table : ais.getUserTables().values()) { tableIdGenerator = Math.max(tableIdGenerator, table.getTableId() + 1); } } } public void setTableIdOffset(int offset) { this.tableIdGenerator = offset; } public void setIndexIdOffset (int offset) { this.indexIdGenerator = offset; } public void sequence (String schemaName, String sequenceName, long start, long increment, long minValue, long maxValue, boolean cycle) { LOG.info("sequence: {}.{} ", schemaName,sequenceName); Sequence identityGenerator = Sequence.create(ais, schemaName, sequenceName, start, increment, minValue, maxValue, cycle); identityGenerator.setTreeName(nameGenerator.generateIdentitySequenceTreeName(identityGenerator)); } public void userTable(String schemaName, String 
tableName) { LOG.info("userTable: " + schemaName + "." + tableName); UserTable.create(ais, schemaName, tableName, tableIdGenerator++); } public void userTableInitialAutoIncrement(String schemaName, String tableName, Long initialAutoIncrementValue) { LOG.info("userTableInitialAutoIncrement: " + schemaName + "." + tableName + " = " + initialAutoIncrementValue); UserTable table = ais.getUserTable(schemaName, tableName); checkFound(table, "setting initial autoincrement value", "user table", concat(schemaName, tableName)); table.setInitialAutoIncrementValue(initialAutoIncrementValue); } public void view(String schemaName, String tableName, String definition, Properties definitionProperties, Map<TableName,Collection<String>> tableColumnReferences) { LOG.info("view: " + schemaName + "." + tableName); View.create(ais, schemaName, tableName, definition, definitionProperties, tableColumnReferences); } public void column(String schemaName, String tableName, String columnName, Integer position, String typeName, Long typeParameter1, Long typeParameter2, Boolean nullable, Boolean autoIncrement, String charset, String collation) { column(schemaName, tableName, columnName, position, typeName, typeParameter1, typeParameter2, nullable, autoIncrement, charset, collation, null); } public void column(String schemaName, String tableName, String columnName, Integer position, String typeName, Long typeParameter1, Long typeParameter2, Boolean nullable, Boolean autoIncrement, String charset, String collation, String defaultValue) { LOG.info("column: " + schemaName + "." + tableName + "." 
+ columnName); Columnar table = ais.getColumnar(schemaName, tableName); checkFound(table, "creating column", "user table", concat(schemaName, tableName)); Type type = ais.getType(typeName); checkFound(type, "creating column", "type", typeName); Column column = Column.create(table, columnName, position, type); column.setNullable(nullable); column.setAutoIncrement(autoIncrement); column.setTypeParameter1(typeParameter1); column.setTypeParameter2(typeParameter2); column.setCharset(charset); column.setCollation(collation); column.setDefaultValue(defaultValue); column.finishCreating(); } public void columnAsIdentity (String schemaName, String tableName, String columnName, String sequenceName, Boolean defaultIdentity) { LOG.info("column as identity: " + schemaName + "." + tableName + "." + columnName + ": " + sequenceName); Column column = ais.getTable(schemaName, tableName).getColumn(columnName); column.setDefaultIdentity(defaultIdentity); Sequence identityGenerator = ais.getSequence(new TableName (schemaName, sequenceName)); column.setIdentityGenerator(identityGenerator); } public void index(String schemaName, String tableName, String indexName, Boolean unique, String constraint) { LOG.info("index: " + schemaName + "." + tableName + "." + indexName); Table table = ais.getTable(schemaName, tableName); checkFound(table, "creating index", "table", concat(schemaName, tableName)); Index index = TableIndex.create(ais, table, indexName, indexIdGenerator++, unique, constraint); index.setTreeName(nameGenerator.generateIndexTreeName(index)); } /** @deprecated */ public void groupIndex(String groupName, String indexName, Boolean unique, Index.JoinType joinType) { groupIndex(findFullGroupName(groupName), indexName, unique, joinType); } public void groupIndex(TableName groupName, String indexName, Boolean unique, Index.JoinType joinType) { LOG.info("groupIndex: " + groupName + "." 
+ indexName); Group group = ais.getGroup(groupName); checkFound(group, "creating group index", "group", groupName.toString()); setRootIfNeeded(group); String constraint = unique ? Index.UNIQUE_KEY_CONSTRAINT : Index.KEY_CONSTRAINT; Index index = GroupIndex.create(ais, group, indexName, indexIdGenerator++, unique, constraint, joinType); index.setTreeName(nameGenerator.generateIndexTreeName(index)); } public void indexColumn(String schemaName, String tableName, String indexName, String columnName, Integer position, Boolean ascending, Integer indexedLength) { LOG.info("indexColumn: " + schemaName + "." + tableName + "." + indexName + ":" + columnName); Table table = ais.getTable(schemaName, tableName); checkFound(table, "creating index column", "table", concat(schemaName, tableName)); Column column = table.getColumn(columnName); checkFound(column, "creating index column", "column", concat(schemaName, tableName, columnName)); Index index = table.getIndex(indexName); checkFound(table, "creating index column", "index", concat(schemaName, tableName, indexName)); IndexColumn.create(index, column, position, ascending, indexedLength); } /** @deprecated **/ public void groupIndexColumn(String groupName, String indexName, String schemaName, String tableName, String columnName, Integer position) { groupIndexColumn(findFullGroupName(groupName), indexName, schemaName, tableName, columnName, position); } public void groupIndexColumn(TableName groupName, String indexName, String schemaName, String tableName, String columnName, Integer position) { LOG.info("groupIndexColumn: " + groupName + "." 
+ indexName + ":" + columnName); Group group = ais.getGroup(groupName); checkFound(group, "creating group index column", "group", groupName.toString()); Index index = group.getIndex(indexName); checkFound(index, "creating group index column", "index", concat(groupName.toString(), indexName)); Table table = ais.getTable(schemaName, tableName); if (!table.getGroup().getName().equals(groupName)) { throw new IllegalArgumentException("group name mismatch: " + groupName + " != " + table.getGroup()); } checkFound(table, "creating group index column", "table", concat(schemaName, tableName)); Column column = table.getColumn(columnName); checkFound(column, "creating group index column", "column", concat(schemaName, tableName, columnName)); IndexColumn.create(index, column, position, true, null); } public void joinTables(String joinName, String parentSchemaName, String parentTableName, String childSchemaName, String childTableName) { LOG.info("joinTables: " + joinName + ": " + childSchemaName + "." + childTableName + " -> " + parentSchemaName + "." + parentTableName); UserTable child = ais.getUserTable(childSchemaName, childTableName); checkFound(child, "creating join", "child table", concat(childSchemaName, childTableName)); UserTable parent = ais.getUserTable(parentSchemaName, parentTableName); if (parent == null) { TableName parentName = new TableName(parentSchemaName, parentTableName); ForwardTableReference forwardTableReference = new ForwardTableReference( joinName, parentName, child); forwardReferences.put(joinName, forwardTableReference); } else { Join.create(ais, joinName, parent, child); } } public void joinColumns(String joinName, String parentSchemaName, String parentTableName, String parentColumnName, String childSchemaName, String childTableName, String childColumnName) { LOG.info("joinColumns: " + joinName + ": " + childSchemaName + "." + childTableName + "." + childColumnName + " -> " + parentSchemaName + "." + parentTableName + "." 
+ parentColumnName); // Get child info UserTable childTable = ais .getUserTable(childSchemaName, childTableName); checkFound(childTable, "creating join column", "child table", concat(childSchemaName, childTableName)); Column childColumn = childTable.getColumn(childColumnName); checkFound(childColumn, "creating join column", "child column", concat(childSchemaName, childTableName, childColumnName)); // Handle parent - could be a forward reference UserTable parentTable = ais.getUserTable(parentSchemaName, parentTableName); if (parentTable == null) { // forward reference ForwardTableReference forwardTableReference = forwardReferences .get(joinName); forwardTableReference.addColumnReference(parentColumnName, childColumn); } else { // we've seen the child table Column parentColumn = parentTable.getColumn(parentColumnName); checkFound(parentColumn, "creating join column", "parent column", concat(parentSchemaName, parentTableName, parentColumnName)); Join join = ais.getJoin(joinName); checkFound( join, "creating join column", "join", concat(parentSchemaName, parentTableName, parentColumnName) + "/" + concat(childSchemaName, childTableName, childColumnName)); join.addJoinColumn(parentColumn, childColumn); } } public void routine(String schemaName, String routineName, String language, Routine.CallingConvention callingConvention) { LOG.info("routine: {}.{} ", schemaName, routineName); Routine routine = Routine.create(ais, schemaName, routineName, language, callingConvention); } public void parameter(String schemaName, String routineName, String parameterName, Parameter.Direction direction, String typeName, Long typeParameter1, Long typeParameter2) { LOG.info("parameter: {} {}", concat(schemaName, routineName), parameterName); Routine routine = ais.getRoutine(schemaName, routineName); checkFound(routine, "creating parameter", "routine", concat(schemaName, routineName)); Type type = ais.getType(typeName); checkFound(type, "creating parameter", "type", typeName); Parameter 
parameter = Parameter.create(routine, parameterName, direction, type, typeParameter1, typeParameter2); } public void routineExternalName(String schemaName, String routineName, String jarSchema, String jarName, String className, String methodName) { LOG.info("external name: {} {}", concat(schemaName, routineName), concat(jarName, className, methodName)); Routine routine = ais.getRoutine(schemaName, routineName); checkFound(routine, "external name", "routine", concat(schemaName, routineName)); SQLJJar sqljJar = null; if (jarName != null) { sqljJar = ais.getSQLJJar(jarSchema, jarName); checkFound(sqljJar, "external name", "SQJ/J jar", concat(jarSchema, jarName)); } routine.setExternalName(sqljJar, className, methodName); } public void routineDefinition(String schemaName, String routineName, String definition) { LOG.info("external name: {} {}", concat(schemaName, routineName), definition); Routine routine = ais.getRoutine(schemaName, routineName); checkFound(routine, "external name", "routine", concat(schemaName, routineName)); routine.setDefinition(definition); } public void routineSQLAllowed(String schemaName, String routineName, Routine.SQLAllowed sqlAllowed) { LOG.info("SQL allowed: {} {}", concat(schemaName, routineName), sqlAllowed); Routine routine = ais.getRoutine(schemaName, routineName); checkFound(routine, "SQL allowed", "routine", concat(schemaName, routineName)); routine.setSQLAllowed(sqlAllowed); } public void routineDynamicResultSets(String schemaName, String routineName, int dynamicResultSets) { LOG.info("dynamic result sets: {} {}", concat(schemaName, routineName), dynamicResultSets); Routine routine = ais.getRoutine(schemaName, routineName); checkFound(routine, "dynamic result sets", "routine", concat(schemaName, routineName)); routine.setDynamicResultSets(dynamicResultSets); } public void sqljJar(String schemaName, String jarName, URL url) { LOG.info("SQL/J jar: {}.{} ", schemaName, jarName); SQLJJar sqljJar = SQLJJar.create(ais, schemaName, jarName, 
url); } public void basicSchemaIsComplete() { LOG.info("basicSchemaIsComplete"); for (UserTable userTable : ais.getUserTables().values()) { userTable.endTable(); // endTable may have created new index, set its tree name if so Index index = userTable.getPrimaryKeyIncludingInternal().getIndex(); if (index.getTreeName() == null) { index.setTreeName(nameGenerator.generateIndexTreeName(index)); } } for (ForwardTableReference forwardTableReference : forwardReferences.values()) { UserTable childTable = forwardTableReference.childTable(); UserTable parentTable = ais.getUserTable(forwardTableReference .parentTableName().getSchemaName(), forwardTableReference .parentTableName().getTableName()); if (parentTable != null){ Join join = Join.create(ais, forwardTableReference.joinName(), parentTable, childTable); for (ForwardColumnReference forwardColumnReference : forwardTableReference .forwardColumnReferences()) { Column childColumn = forwardColumnReference.childColumn(); Column parentColumn = parentTable .getColumn(forwardColumnReference.parentColumnName()); checkFound(childColumn, "marking basic schema complete", "parent column", forwardColumnReference.parentColumnName()); join.addJoinColumn(parentColumn, childColumn); } } } forwardReferences.clear(); } // API for describing groups public void createGroup(String groupName, String groupSchemaName) { LOG.info("createGroup: {}.{}", groupSchemaName, groupName); Group group = Group.create(ais, groupSchemaName, groupName); group.setTreeName(nameGenerator.generateGroupTreeName(groupSchemaName, groupName)); } /** @deprecated **/ public void deleteGroup(String groupName) { deleteGroup(findFullGroupName(groupName)); } public void deleteGroup(TableName groupName) { LOG.info("deleteGroup: " + groupName); Group group = ais.getGroup(groupName); checkFound(group, "deleting group", "group", groupName.toString()); boolean groupEmpty = true; for (UserTable userTable : ais.getUserTables().values()) { if (userTable.getGroup() == group) { 
groupEmpty = false; } } if (groupEmpty) { ais.deleteGroup(group); } else { throw new GroupNotEmptyException(group); } } /** @deprecated **/ public void addTableToGroup(String groupName, String schemaName, String tableName) { addTableToGroup(findFullGroupName(groupName), schemaName, tableName); } public void addTableToGroup(TableName groupName, String schemaName, String tableName) { LOG.info("addTableToGroup: " + groupName + ": " + schemaName + "." + tableName); // group Group group = ais.getGroup(groupName); checkFound(group, "adding table to group", "group", groupName.toString()); // table UserTable table = ais.getUserTable(schemaName, tableName); checkFound(table, "adding table to group", "table", concat(schemaName, tableName)); checkGroupAddition(group, table.getGroup(), concat(schemaName, tableName)); setTablesGroup(table, group); } // addJoinToGroup and removeJoinFromGroup identify a join based on parent // and child tables. This is OK for // removeJoinFromGroup because of the restrictions on group structure. It // DOES NOT WORK for addJoinToGroup, // because there could be multiple candidate joins between a pair of tables. 
/** @deprecated **/ public void addJoinToGroup(String groupName, String joinName, Integer weight) { addJoinToGroup(findFullGroupName(groupName), joinName, weight); } public void addJoinToGroup(TableName groupName, String joinName, Integer weight) { LOG.info("addJoinToGroup: " + groupName + ": " + joinName); // join Join join = ais.getJoin(joinName); checkFound(join, "adding join to group", "join", joinName); // group Group group = ais.getGroup(groupName); checkFound(group, "adding join to group", "group", groupName.toString()); // parent String parentSchemaName = join.getParent().getName().getSchemaName(); String parentTableName = join.getParent().getName().getTableName(); UserTable parent = ais.getUserTable(parentSchemaName, parentTableName); checkFound(parent, "adding join to group", "parent table", concat(parentSchemaName, parentTableName)); checkGroupAddition(group, parent.getGroup(), concat(parentSchemaName, parentTableName)); setTablesGroup(parent, group); // child String childSchemaName = join.getChild().getName().getSchemaName(); String childTableName = join.getChild().getName().getTableName(); UserTable child = ais.getUserTable(childSchemaName, childTableName); checkFound(child, "adding join to group", "child table", concat(childSchemaName, childTableName)); checkGroupAddition(group, child.getGroup(), concat(childSchemaName, childTableName)); checkCycle(child, group); setTablesGroup(child, group); join.setGroup(group); join.setWeight(weight); assert join.getParent() == parent : join; checkGroupAddition(group, join.getGroup(), joinName); } public void removeTableFromGroup(String groupName, String schemaName, String tableName) { removeTableFromGroup(findFullGroupName(groupName), schemaName, tableName); } public void removeTableFromGroup(TableName groupName, String schemaName, String tableName) { LOG.info("removeTableFromGroup: " + groupName + ": " + schemaName + "." + tableName); // This is only valid for a single-table group. 
// group Group group = ais.getGroup(groupName); checkFound(group, "removing join from group", "group", groupName.toString()); // table UserTable table = ais.getUserTable(schemaName, tableName); checkFound(table, "removing join from group", "table table", concat(schemaName, tableName)); checkInGroup(group, table, "removing join from group", "table table"); if (table.getParentJoin() != null || !table.getChildJoins().isEmpty()) { throw new GroupStructureException( "Cannot remove table from a group unless " + "it is the only table in the group, group " + group.getName() + ", table " + table.getName()); } setTablesGroup(table, null); } /** @deprecated **/ public void removeJoinFromGroup(String groupName, String joinName) { removeJoinFromGroup(findFullGroupName(groupName), joinName); } public void removeJoinFromGroup(TableName groupName, String joinName) { LOG.info("removeJoinFromGroup: " + groupName + ": " + joinName); // join Join join = ais.getJoin(joinName); checkFound(join, "removing join from group", "join", joinName); // group Group group = ais.getGroup(groupName); checkFound(group, "removing join from group", "group", groupName.toString()); checkInGroup(group, join, "removing join from group", "child table"); // parent String parentSchemaName = join.getParent().getName().getSchemaName(); String parentTableName = join.getParent().getName().getTableName(); UserTable parent = ais.getUserTable(parentSchemaName, parentTableName); checkFound(parent, "removing join from group", "parent table", concat(parentSchemaName, parentTableName)); checkInGroup(group, parent, "removing join from group", "parent table"); // child String childSchemaName = join.getChild().getName().getSchemaName(); String childTableName = join.getChild().getName().getTableName(); UserTable child = ais.getUserTable(childSchemaName, childTableName); checkFound(child, "removing join from group", "child table", concat(childSchemaName, childTableName)); checkInGroup(group, child, "removing join from 
group", "child table"); // Remove the join from the group join.setGroup(null); // Remove the parent from the group if it isn't involved in any other // joins in this group. if (parent.getChildJoins().size() == 0 && parent.getParentJoin() == null) { setTablesGroup(parent, null); } // Same for the child (except we know that parent is null) assert child.getParentJoin() == null; if (child.getChildJoins().size() == 0) { setTablesGroup(child, null); } } /** @deprecated **/ public void moveTreeToGroup(String schemaName, String tableName, String groupName, String joinName) { moveTreeToGroup(schemaName, tableName, findFullGroupName(groupName), joinName); } public void moveTreeToGroup(String schemaName, String tableName, TableName groupName, String joinName) { LOG.info("moveTree: " + schemaName + "." + tableName + " -> " + groupName + " via join " + joinName); // table UserTable table = ais.getUserTable(schemaName, tableName); checkFound(table, "moving tree", "table", concat(schemaName, tableName)); // group Group group = ais.getGroup(groupName); checkFound(group, "moving tree", "group", groupName.toString()); // join Join join = ais.getJoin(joinName); checkFound(join, "moving tree", "join", joinName); // Remove table's parent join from its current group (if there is a // parent) Join parentJoin = table.getParentJoin(); if (parentJoin != null) { parentJoin.setGroup(null); // set group usage to NEVER on old parent join parentJoin.setGroupingUsage(GroupingUsage.NEVER); } // Move table to group. Get the children first, because moving the table // to another group will cause // getChildJoins() to return empty. 
List<Join> children = table.getChildJoins(); setTablesGroup(table, group); // Move the join to the group join.setGroup(group); // set group usage to ALWAYS on new join join.getSourceTypes().add(SourceType.USER); join.setGroupingUsage(GroupingUsage.ALWAYS); moveTree(children, group); } public void moveTreeToEmptyGroup(String schemaName, String tableName, String groupName) { moveTreeToEmptyGroup(schemaName, tableName, findFullGroupName(groupName)); } public void moveTreeToEmptyGroup(String schemaName, String tableName, TableName groupName) { LOG.info("moveTree: " + schemaName + "." + tableName + " -> empty group " + groupName); // table UserTable table = ais.getUserTable(schemaName, tableName); checkFound(table, "moving tree", "table", concat(schemaName, tableName)); // group Group group = ais.getGroup(groupName); checkFound(group, "moving tree", "group", groupName.toString()); // Remove table's parent join from its current group (if there is a // parent) Join parentJoin = table.getParentJoin(); if (parentJoin != null) { parentJoin.setGroup(null); } // find all candidate parent joins and set usage to NEVER to indicate // table should be ROOT for (Join canParentJoin : table.getCandidateParentJoins()) { canParentJoin.setGroupingUsage(GroupingUsage.NEVER); } // Move table to group. Get the children first (see comment in // moveTreeToGroup). 
List<Join> children = table.getChildJoins(); setTablesGroup(table, group); moveTree(children, group); } public void groupingIsComplete() { LOG.info("groupingIsComplete"); // Hook up root tables for(Group group : ais.getGroups().values()) { setRootIfNeeded(group); } } public void clearGroupings() { LOG.info("clear groupings"); ais.getGroups().clear(); for (UserTable table : ais.getUserTables().values()) { setTablesGroup(table, null); } for (Join join : ais.getJoins().values()) { join.setGroup(null); } } // API for getting the created AIS public AkibanInformationSchema akibanInformationSchema() { LOG.info("getting AIS"); return ais; } private UserTable findRoot(Group group) { UserTable root = null; for(UserTable table : ais.getUserTables().values()) { if((table.getGroup() == group) && table.isRoot()) { if(root != null) { return null; // Multiple roots } root = table; } } return root; } private void setRootIfNeeded(Group group) { if(group.getRoot() == null) { group.setRootTable(findRoot(group)); } } private void moveTree(List<Join> joins, Group group) { LOG.debug("moving tree " + joins + " to group " + group); for (Join join : joins) { List<Join> children = join.getChild().getChildJoins(); setTablesGroup(join.getChild(), group); join.setGroup(group); moveTree(children, group); } } private void checkFound(Object object, String action, String needed, String name) { if (object == null) { throw new NoSuchObjectException(action, needed, name); } } private void checkGroupAddition(Group group, Group existingGroup, String name) { if (existingGroup != null && existingGroup != group) { throw new GroupStructureException(group, existingGroup, name); } } private void checkInGroup(Group group, HasGroup object, String action, String objectDescription) { if (object.getGroup() != group) { throw new NotInGroupException(group, object, action, objectDescription); } } private void checkCycle(UserTable table, Group group) { if (table.getGroup() == group) { String exception = table + " is 
already in " + group + ". Group must be acyclic"; throw new GroupStructureException(exception); } } private String concat(String... strings) { StringBuilder buffer = new StringBuilder(); for (int i = 0; i < strings.length; i++) { if (i > 0) { buffer.append("."); } buffer.append(strings[i]); } return buffer.toString(); } private void setTablesGroup(Table table, Group group) { table.setGroup(group); } public int getTableIdOffset() { return tableIdGenerator; } public int getIndexIdOffset() { return indexIdGenerator; } /** * Tree names are normally set when adding a table to a group (all tables in a group * must have the same tree name). If testing parts of builder that aren't grouped and * LIVE_VALIDATIONS are called, this is a simple work around for that. */ public void setGroupTreeNamesForTest() { for(Group group : ais.getGroups().values()) { if(group.getTreeName() == null) { group.setTreeName(group.getName().toString()); } } } private TableName findFullGroupName(String groupName) { Group group = ais.getGroup(groupName); checkFound(group, "looking up group without schema", "group", groupName); return group.getName(); } // State static final class ColumnName { private final TableName table; private final String columnName; public ColumnName(TableName table, String columnName) { this.table = table; this.columnName = columnName; } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + ((table == null) ? 0 : table.hashCode()); result = prime * result + ((columnName == null) ? 0 : columnName.hashCode()); return result; } @Override public boolean equals(Object object) { if (object == this) return true; if (!(object instanceof ColumnName)) return false; ColumnName other = (ColumnName) object; if (this.table == null) { if(other.table != null) return false; } else if (!this.table.equals(other.table)) { return false; } return (this.columnName == null) ? 
other.columnName == null : this.columnName.equals(other.columnName); } } public final static int MAX_COLUMN_NAME_LENGTH = 64; private final AkibanInformationSchema ais; private Map<String, ForwardTableReference> forwardReferences = // join name // ForwardTableReference new LinkedHashMap<String, ForwardTableReference>(); private NameGenerator nameGenerator; // This is temporary. We need unique ids generated here until the // server assigns them. private int tableIdGenerator = 0; private int indexIdGenerator = 1; // Inner classes private class ForwardTableReference { public ForwardTableReference(String joinName, TableName parentTableName, UserTable childTable) { this.joinName = joinName; this.parentTableName = parentTableName; this.childTable = childTable; } public String joinName() { return joinName; } public TableName parentTableName() { return parentTableName; } public UserTable childTable() { return childTable; } public void addColumnReference(String parentColumnName, Column childColumn) { forwardColumnReferences.add(new ForwardColumnReference( parentColumnName, childColumn)); } public List<ForwardColumnReference> forwardColumnReferences() { return forwardColumnReferences; } private final String joinName; private final UserTable childTable; private final TableName parentTableName; private final List<ForwardColumnReference> forwardColumnReferences = new ArrayList<ForwardColumnReference>(); } private class ForwardColumnReference { public ForwardColumnReference(String parentColumnName, Column childColumn) { this.parentColumnName = parentColumnName; this.childColumn = childColumn; } public String parentColumnName() { return parentColumnName; } public Column childColumn() { return childColumn; } private final String parentColumnName; private final Column childColumn; } public static class NoSuchObjectException extends RuntimeException { public NoSuchObjectException(String action, String needed, String name) { super("While " + action + ", could not find " + needed + " 
" + name); } } public static class GroupStructureException extends RuntimeException { public GroupStructureException(Group group, Group existingGroup, String name) { super(name + " already belongs to group " + existingGroup.getName() + " so it cannot be associated with group " + group.getName()); } public GroupStructureException(String message) { super(message); } } public static class GroupNotEmptyException extends RuntimeException { public GroupNotEmptyException(Group group) { super( "Group " + group.getName() + " cannot be deleted because it contains at least one user table."); } } public class NotInGroupException extends RuntimeException { public NotInGroupException(Group group, HasGroup object, String action, String objectDescription) { super("While " + action + ", found " + objectDescription + " not in " + group + ", but in " + object.getGroup() + " instead."); } } }
package com.enderio.core.common;

import javax.annotation.Nonnull;
import javax.annotation.Nullable;

import com.enderio.core.api.common.util.ITankAccess;
import com.enderio.core.common.util.FluidUtil;

import net.minecraft.block.Block;
import net.minecraft.block.SoundType;
import net.minecraft.block.material.MapColor;
import net.minecraft.block.material.Material;
import net.minecraft.block.state.BlockFaceShape;
import net.minecraft.block.state.IBlockState;
import net.minecraft.creativetab.CreativeTabs;
import net.minecraft.entity.EntityLivingBase;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.item.ItemStack;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.util.EnumFacing;
import net.minecraft.util.EnumHand;
import net.minecraft.util.NonNullList;
import net.minecraft.util.math.BlockPos;
import net.minecraft.world.ChunkCache;
import net.minecraft.world.IBlockAccess;
import net.minecraft.world.World;
import net.minecraft.world.chunk.Chunk.EnumCreateEntityType;

/**
 * Base block class that ties a {@link Block} to an optional {@link TileEntityBase}
 * subclass: creates/initializes the tile entity, persists its custom NBT into the
 * dropped item stack and restores it on placement, and offers null-safe tile
 * entity lookup helpers for main-thread and render-thread use.
 *
 * @param <T> the tile entity type backing this block, or any type if {@code teClass} is null.
 */
public abstract class BlockEnder<T extends TileEntityBase> extends Block {

  // The tile entity class instantiated for this block; null means "no tile entity".
  protected final @Nullable Class<? extends T> teClass;

  protected BlockEnder(@Nullable Class<? extends T> teClass) {
    this(teClass, new Material(MapColor.IRON), MapColor.IRON);
  }

  protected BlockEnder(@Nullable Class<? extends T> teClass, @Nonnull Material mat) {
    this(teClass, mat, mat.getMaterialMapColor());
  }

  // NOTE(review): mapColor is accepted but never used — super(mat) ignores it. Confirm intent.
  protected BlockEnder(@Nullable Class<? extends T> teClass, @Nonnull Material mat, MapColor mapColor) {
    super(mat);
    this.teClass = teClass;
    setHardness(0.5F);
    setSoundType(SoundType.METAL);
    setHarvestLevel("pickaxe", 0);
  }

  /** A block has a tile entity exactly when a tile entity class was given. */
  @Override
  public boolean hasTileEntity(@Nonnull IBlockState state) {
    return teClass != null;
  }

  /**
   * Instantiates, world-attaches and initializes the configured tile entity.
   *
   * @throws RuntimeException if instantiation fails or no tile entity class was configured.
   */
  @Override
  public @Nonnull TileEntity createTileEntity(@Nonnull World world, @Nonnull IBlockState state) {
    if (teClass != null) {
      try {
        T te = teClass.newInstance();
        te.setWorldCreate(world);
        te.init();
        return te;
      } catch (Exception e) {
        throw new RuntimeException("Could not create tile entity for block " + getLocalizedName() + " for class " + teClass, e);
      }
    }
    throw new RuntimeException(
        "Cannot create a TileEntity for a block that doesn't have a TileEntity. This is not a problem with EnderCore, this is caused by the caller.");
  }

  /* Subclass Helpers */

  /**
   * Right-click handling: sneaking players pass through; otherwise try to move fluid
   * between the player's held item and the block's tanks (if it is an ITankAccess),
   * and fall back to opening the GUI.
   */
  @Override
  public boolean onBlockActivated(@Nonnull World worldIn, @Nonnull BlockPos pos, @Nonnull IBlockState state, @Nonnull EntityPlayer playerIn,
      @Nonnull EnumHand hand, @Nonnull EnumFacing side, float hitX, float hitY, float hitZ) {
    if (playerIn.isSneaking()) {
      return false;
    }
    TileEntity te = getTileEntity(worldIn, pos);
    if (te instanceof ITankAccess) {
      if (FluidUtil.fillInternalTankFromPlayerHandItem(worldIn, pos, playerIn, hand, (ITankAccess) te)) {
        return true;
      }
      if (FluidUtil.fillPlayerHandItemFromInternalTank(worldIn, pos, playerIn, hand, (ITankAccess) te)) {
        return true;
      }
    }
    return openGui(worldIn, pos, playerIn, side);
  }

  /** Hook for subclasses with a GUI; the default has none and reports "not handled". */
  protected boolean openGui(@Nonnull World world, @Nonnull BlockPos pos, @Nonnull EntityPlayer entityPlayer, @Nonnull EnumFacing side) {
    return false;
  }

  /**
   * When the block will be harvested, keep it in the world for now so
   * {@link #harvestBlock} can still read the tile entity; it is removed there.
   */
  @Override
  public boolean removedByPlayer(@Nonnull IBlockState state, @Nonnull World world, @Nonnull BlockPos pos, @Nonnull EntityPlayer player, boolean willHarvest) {
    if (willHarvest) {
      return true;
    }
    return super.removedByPlayer(state, world, pos, player, willHarvest);
  }

  /** Completes the deferred removal started in {@link #removedByPlayer}. */
  @Override
  public void harvestBlock(@Nonnull World worldIn, @Nonnull EntityPlayer player, @Nonnull BlockPos pos, @Nonnull IBlockState state, @Nullable TileEntity te,
      @Nonnull ItemStack stack) {
    super.harvestBlock(worldIn, player, pos, state, te, stack);
    worldIn.setBlockToAir(pos);
  }

  /** Drops the NBT-carrying item plus any subclass-defined extras. */
  @Override
  public final void getDrops(@Nonnull NonNullList<ItemStack> drops, @Nonnull IBlockAccess world, @Nonnull BlockPos pos, @Nonnull IBlockState state,
      int fortune) {
    final T te = getTileEntity(world, pos);
    final ItemStack drop = getNBTDrop(world, pos, state, fortune, te);
    if (drop != null) {
      drops.add(drop);
    }
    getExtraDrops(drops, world, pos, state, fortune, te);
  }

  /** Builds the dropped item for this block, with the tile entity's custom NBT written into it. */
  public @Nullable ItemStack getNBTDrop(@Nonnull IBlockAccess world, @Nonnull BlockPos pos, @Nonnull IBlockState state, int fortune, @Nullable T te) {
    ItemStack itemStack = new ItemStack(this, 1, damageDropped(state));
    processDrop(world, pos, te, itemStack);
    return itemStack;
  }

  /** Writes the tile entity's custom NBT onto the drop, if there is a tile entity. */
  protected final void processDrop(@Nonnull IBlockAccess world, @Nonnull BlockPos pos, @Nullable T te, @Nonnull ItemStack drop) {
    if (te != null) {
      te.writeCustomNBT(drop);
    }
  }

  /** Hook for subclasses to append additional drops; default adds nothing. */
  public void getExtraDrops(@Nonnull NonNullList<ItemStack> drops, @Nonnull IBlockAccess world, @Nonnull BlockPos pos, @Nonnull IBlockState state, int fortune,
      @Nullable T te) {
  }

  /**
   * On placement, restores the tile entity state from the item's custom NBT and
   * notifies the subclass hooks (stack variant always, tile-entity variant only
   * when a tile entity exists).
   */
  @Override
  public final void onBlockPlacedBy(@Nonnull World worldIn, @Nonnull BlockPos pos, @Nonnull IBlockState state, @Nonnull EntityLivingBase placer,
      @Nonnull ItemStack stack) {
    onBlockPlaced(worldIn, pos, state, placer, stack);
    T te = getTileEntity(worldIn, pos);
    if (te != null) {
      te.readCustomNBT(stack);
      onBlockPlaced(worldIn, pos, state, placer, te);
    }
  }

  /** Placement hook (item stack variant); default does nothing. */
  public void onBlockPlaced(@Nonnull World worldIn, @Nonnull BlockPos pos, @Nonnull IBlockState state, @Nonnull EntityLivingBase placer,
      @Nonnull ItemStack stack) {
  }

  /** Placement hook (tile entity variant); default does nothing. */
  public void onBlockPlaced(@Nonnull World worldIn, @Nonnull BlockPos pos, @Nonnull IBlockState state, @Nonnull EntityLivingBase placer, @Nonnull T te) {
  }

  /**
   * Tries to load this block's TileEntity if it exists. Will create the TileEntity if it doesn't yet exist.
   * <p>
   * <strong>This will crash if used in any other thread than the main (client or server) thread!</strong>
   */
  protected @Nullable T getTileEntity(@Nonnull IBlockAccess world, @Nonnull BlockPos pos) {
    // Local snapshot so the null check and the use see the same value.
    final Class<? extends T> teClass2 = teClass;
    if (teClass2 != null) {
      TileEntity te = world.getTileEntity(pos);
      if (teClass2.isInstance(te)) {
        return teClass2.cast(te);
      }
    }
    return null;
  }

  /**
   * Tries to load this block's TileEntity if it exists. Will not create the TileEntity when used in a render thread
   * with the correct IBlockAccess.
   */
  protected @Nullable T getTileEntitySafe(@Nonnull IBlockAccess world, @Nonnull BlockPos pos) {
    if (world instanceof ChunkCache) {
      final Class<? extends T> teClass2 = teClass;
      if (teClass2 != null) {
        // CHECK: only returns an already-existing tile entity, never creates one.
        TileEntity te = ((ChunkCache) world).getTileEntity(pos, EnumCreateEntityType.CHECK);
        if (teClass2.isInstance(te)) {
          return teClass2.cast(te);
        }
      }
      return null;
    } else {
      return getTileEntity(world, pos);
    }
  }

  /**
   * Tries to load any block's TileEntity if it exists. Will not create the TileEntity when used in a render thread
   * with the correct IBlockAccess. Will not cause chunk loads.
   */
  public static @Nullable TileEntity getAnyTileEntitySafe(@Nonnull IBlockAccess world, @Nonnull BlockPos pos) {
    return getAnyTileEntitySafe(world, pos, TileEntity.class);
  }

  /**
   * Tries to load any block's TileEntity if it exists. Will not create the TileEntity when used in a render thread
   * with the correct IBlockAccess. Will not cause chunk loads. Also works with interfaces as the class parameter.
   */
  @SuppressWarnings("unchecked")
  public static @Nullable <Q> Q getAnyTileEntitySafe(@Nonnull IBlockAccess world, @Nonnull BlockPos pos, Class<Q> teClass) {
    TileEntity te = null;
    if (world instanceof ChunkCache) {
      te = ((ChunkCache) world).getTileEntity(pos, EnumCreateEntityType.CHECK);
    } else if (world instanceof World) {
      // Guard against triggering a chunk load.
      if (((World) world).isBlockLoaded(pos)) {
        te = world.getTileEntity(pos);
      }
    } else {
      te = world.getTileEntity(pos);
    }
    if (teClass == null) {
      return (Q) te;
    }
    if (teClass.isInstance(te)) {
      return teClass.cast(te);
    }
    return null;
  }

  /**
   * Tries to load any block's TileEntity if it exists. Not suitable for tasks outside the main thread. Also works with
   * interfaces as the class parameter.
   */
  @SuppressWarnings("unchecked")
  public static @Nullable <Q> Q getAnyTileEntity(@Nonnull IBlockAccess world, @Nonnull BlockPos pos, Class<Q> teClass) {
    TileEntity te = world.getTileEntity(pos);
    if (teClass == null) {
      return (Q) te;
    }
    if (teClass.isInstance(te)) {
      return teClass.cast(te);
    }
    return null;
  }

  /**
   * Rate limiter: true once every {@code interval} ticks. Delegates to the tile
   * entity when present, otherwise derives the answer from world time.
   */
  protected boolean shouldDoWorkThisTick(@Nonnull World world, @Nonnull BlockPos pos, int interval) {
    T te = getTileEntity(world, pos);
    if (te == null) {
      return world.getTotalWorldTime() % interval == 0;
    } else {
      return te.shouldDoWorkThisTick(interval);
    }
  }

  /** Same as {@link #shouldDoWorkThisTick(World, BlockPos, int)} but phase-shifted by {@code offset} ticks. */
  protected boolean shouldDoWorkThisTick(@Nonnull World world, @Nonnull BlockPos pos, int interval, int offset) {
    T te = getTileEntity(world, pos);
    if (te == null) {
      return (world.getTotalWorldTime() + offset) % interval == 0;
    } else {
      return te.shouldDoWorkThisTick(interval, offset);
    }
  }

  /** The tile entity class configured for this block, or null if it has none. */
  public Class<? extends T> getTeClass() {
    return teClass;
  }

  // wrapper because vanilla null-annotations are wrong
  @SuppressWarnings("null")
  @Override
  public @Nonnull Block setCreativeTab(@Nullable CreativeTabs tab) {
    return super.setCreativeTab(tab);
  }

  /** Installs the face-shape strategy consulted by {@link #getBlockFaceShape}. */
  public void setShape(IShape<T> shape) {
    this.shape = shape;
  }

  /**
   * Delegates face-shape queries to the installed {@link IShape} (preferring the
   * tile-entity-aware variant when the tile entity is available), falling back to
   * the vanilla behavior when no shape was set.
   */
  @Override
  public final @Nonnull BlockFaceShape getBlockFaceShape(@Nonnull IBlockAccess worldIn, @Nonnull IBlockState state, @Nonnull BlockPos pos,
      @Nonnull EnumFacing face) {
    if (shape != null) {
      T te = getTileEntitySafe(worldIn, pos);
      if (te != null) {
        return shape.getBlockFaceShape(worldIn, state, pos, face, te);
      } else {
        return shape.getBlockFaceShape(worldIn, state, pos, face);
      }
    }
    return super.getBlockFaceShape(worldIn, state, pos, face);
  }

  // Optional per-block face-shape strategy; null means "use vanilla behavior".
  private IShape<T> shape = null;

  /** Strategy for answering face-shape queries, optionally specialized on the tile entity. */
  public static interface IShape<T> {

    @Nonnull
    BlockFaceShape getBlockFaceShape(@Nonnull IBlockAccess worldIn, @Nonnull IBlockState state, @Nonnull BlockPos pos, @Nonnull EnumFacing face);

    // By default the tile entity is ignored.
    default @Nonnull BlockFaceShape getBlockFaceShape(@Nonnull IBlockAccess worldIn, @Nonnull IBlockState state, @Nonnull BlockPos pos,
        @Nonnull EnumFacing face, @Nonnull T te) {
      return getBlockFaceShape(worldIn, state, pos, face);
    }
  }

  /** Shape factory: the same face shape on all six faces. */
  protected @Nonnull IShape<T> mkShape(@Nonnull BlockFaceShape allFaces) {
    return new IShape<T>() {
      @Override
      @Nonnull
      public BlockFaceShape getBlockFaceShape(@Nonnull IBlockAccess worldIn, @Nonnull IBlockState state, @Nonnull BlockPos pos, @Nonnull EnumFacing face) {
        return allFaces;
      }
    };
  }

  /** Shape factory: one face shape for up/down, another for the four sides. */
  protected @Nonnull IShape<T> mkShape(@Nonnull BlockFaceShape upDown, @Nonnull BlockFaceShape allSides) {
    return new IShape<T>() {
      @Override
      @Nonnull
      public BlockFaceShape getBlockFaceShape(@Nonnull IBlockAccess worldIn, @Nonnull IBlockState state, @Nonnull BlockPos pos, @Nonnull EnumFacing face) {
        return face == EnumFacing.UP || face == EnumFacing.DOWN ? upDown : allSides;
      }
    };
  }

  /** Shape factory: distinct shapes for down, up, and the four sides. */
  protected @Nonnull IShape<T> mkShape(@Nonnull BlockFaceShape down, @Nonnull BlockFaceShape up, @Nonnull BlockFaceShape allSides) {
    return new IShape<T>() {
      @Override
      @Nonnull
      public BlockFaceShape getBlockFaceShape(@Nonnull IBlockAccess worldIn, @Nonnull IBlockState state, @Nonnull BlockPos pos, @Nonnull EnumFacing face) {
        return face == EnumFacing.UP ? up : face == EnumFacing.DOWN ? down : allSides;
      }
    };
  }

  /** Shape factory: one shape per face, indexed by {@code EnumFacing.ordinal()}. */
  protected @Nonnull IShape<T> mkShape(@Nonnull BlockFaceShape... faces) {
    return new IShape<T>() {
      @SuppressWarnings("null")
      @Override
      @Nonnull
      public BlockFaceShape getBlockFaceShape(@Nonnull IBlockAccess worldIn, @Nonnull IBlockState state, @Nonnull BlockPos pos, @Nonnull EnumFacing face) {
        return faces[face.ordinal()];
      }
    };
  }
}
package de.sajato.logw;

import java.lang.reflect.Method;

/**
 * Resolves the fully qualified class name of a caller by inspecting the current
 * stack trace. When available, the private {@code Throwable.getStackTraceElement(int)}
 * is invoked reflectively (avoids materializing the whole stack trace array);
 * otherwise {@code Throwable.getStackTrace()} is used as a fallback.
 */
public class NameDiscoverer {

    // Cached Throwable.getStackTraceElement(int); null when the method is
    // unavailable (e.g. removed on this JDK) or failed its sanity check.
    private static Method stackTraceMethod;

    static {
        try {
            stackTraceMethod = Throwable.class.getDeclaredMethod("getStackTraceElement", int.class);
            stackTraceMethod.setAccessible(true);
            // Sanity check: frame 0 of a Throwable created right here must name
            // this class, otherwise the reflective shortcut cannot be trusted.
            StackTraceElement stackTraceElement = (StackTraceElement) stackTraceMethod.invoke(new Throwable(), 0);
            if (!NameDiscoverer.class.getName().equals(stackTraceElement.getClassName())) {
                stackTraceMethod = null;
            }
        } catch (Throwable ex) {
            // Deliberately broad: any failure just disables the fast path.
            stackTraceMethod = null;
        }
    }

    /**
     * Returns the class name found at the fixed stack depth 5.
     * NOTE(review): the hard-coded depth assumes a specific call chain into this
     * method — confirm against the actual logging call sites.
     */
    public static String discoverer(){
        return discoverer(5);
    }

    /**
     * Returns the fully qualified class name of the stack frame at {@code deep},
     * counted from this method's own caller chain (0 = the frame creating the Throwable).
     */
    public static String discoverer(int deep){
        return getStackTraceElement(deep).getClassName();
    }

    // Fast path via reflection when possible; otherwise builds the full stack
    // trace and indexes into it. Note: an out-of-range deep value will throw.
    @SuppressWarnings("all")
    private static StackTraceElement getStackTraceElement(final int deep) {
        if (stackTraceMethod != null) {
            try {
                return (StackTraceElement) stackTraceMethod.invoke(new Throwable(), deep);
            } catch (Exception ex) {
                // Failed to get single stack trace element from throwable.
            }
        }
        return new Throwable().getStackTrace()[deep];
    }
}
package com.codecheck123.dumb_bdd;

import java.util.ArrayList;
import java.util.List;

/**
 * The "Given" step of a BDD chain: records one or more given-expressions for a
 * {@link UserStory}, evaluating each expression as it is added, and transitions
 * to a {@link When} step.
 */
public class Given extends AbstractBDD {

    // The story this Given belongs to.
    private final UserStory userStory;
    // All given-expressions added so far, in insertion order.
    private final List<String> given = new ArrayList<String>();

    // Package-private: instances are created by the framework, not by clients.
    Given(UserStory userStory, String bddExpression, ExpressionRunner runner){
        this.userStory = userStory;
        given.add(bddExpression);
        evaluateExpression(bddExpression,runner);
    }

    UserStory getUserStory() {
        return userStory;
    }

    // NOTE(review): returns the internal mutable list — callers could mutate it,
    // and later and() calls will be visible through it. Confirm whether an
    // unmodifiable view would be safe for existing callers.
    List<String> getAllGiven() {
        return given;
    }

    /** Adds and evaluates a further given-expression; returns this for chaining. */
    public Given and(String bddExpression, ExpressionRunner runner){
        given.add(bddExpression);
        evaluateExpression(bddExpression,runner);
        return this;
    }

    /** Evaluates the expression and moves the chain on to a When step. */
    public When when(String bddExpression, ExpressionRunner runner){
        evaluateExpression(bddExpression,runner);
        return new When(this,bddExpression);
    }
}
package edu.ncsu.dlf.database;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import com.mongodb.BasicDBObject;
import com.mongodb.DB;
import com.mongodb.DBCollection;
import com.mongodb.DBCursor;
import com.mongodb.DBObject;
import com.mongodb.MongoClient;
import com.mongodb.MongoCredential;
import com.mongodb.ServerAddress;

import edu.ncsu.dlf.model.PDFUser;
import edu.ncsu.dlf.model.Review;

import org.eclipse.egit.github.core.User;
import org.eclipse.egit.github.core.service.UserService;

/**
 * MongoDB-backed implementation of {@link DBAbstraction}. Connection settings
 * are read from the OpenShift environment variables; reviews are stored in a
 * single collection named after the database ("pdfreview").
 */
public class MongoDB implements DBAbstraction {

    private static final String DB_NAME = "pdfreview";

    private MongoClient mongoClient;

    /**
     * Connects using OPENSHIFT_MONGODB_DB_{HOST,PORT,USERNAME,PASSWORD}.
     * NOTE(review): missing environment variables will surface as
     * NumberFormatException/NullPointerException here, not IOException.
     */
    public MongoDB() throws IOException {
        String portNumber = System.getenv("OPENSHIFT_MONGODB_DB_PORT");
        ServerAddress address = new ServerAddress(System.getenv("OPENSHIFT_MONGODB_DB_HOST"), Integer.parseInt(portNumber));
        String user = System.getenv("OPENSHIFT_MONGODB_DB_USERNAME");
        String password = System.getenv("OPENSHIFT_MONGODB_DB_PASSWORD");
        MongoCredential credential = MongoCredential.createCredential(user, DB_NAME, password.toCharArray());
        this.mongoClient = new MongoClient(address, Arrays.asList(credential));
    }

    /** Reviews where the given user is the reviewer. */
    @Override
    public List<Review> getPendingReviews(User user, UserService userService) {
        return findRequests(user, "Reviewer");
    }

    /** Reviews where the given user is the requester. */
    @Override
    public List<Review> getPendingReviewRequests(User user, UserService userService) {
        return findRequests(user, "Requester");
    }

    /**
     * Queries the collection for documents whose {@code <whichUser>.Login} field
     * matches the user's login.
     *
     * @param userToLookFor the GitHub user whose login is matched.
     * @param whichUser the embedded-document name to match on ("Reviewer" or "Requester").
     */
    private List<Review> findRequests(User userToLookFor, String whichUser) {
        List<Review> retVal = new ArrayList<>();
        DB db = mongoClient.getDB(DB_NAME);
        DBCollection coll = db.getCollection(DB_NAME);
        // Makes the driver decode documents as Review objects, so the cast below is valid.
        coll.setObjectClass(Review.class);
        BasicDBObject query = new BasicDBObject(whichUser + ".Login", userToLookFor.getLogin());
        DBCursor cursor = coll.find(query);
        try {
            while (cursor.hasNext()) {
                DBObject element = cursor.next();
                retVal.add((Review) element);
            }
        } finally {
            // Always release the server-side cursor.
            cursor.close();
        }
        return retVal;
    }

    /** Saves (inserts or upserts) the review document. */
    @Override
    public void addReviewToDatastore(Review newReview) {
        DB db = mongoClient.getDB(DB_NAME);
        DBCollection coll = db.getCollection(DB_NAME);
        coll.save(newReview);
    }

    // NOTE(review): not implemented — calls are silently ignored.
    @Override
    public void removeReviewFromDatastore(String reviewer, String writer, String repo) {
        // TODO Auto-generated method stub
    }
}
package com.conveyal.gtfs.model; import com.conveyal.gtfs.GTFSFeed; import com.google.common.base.Joiner; import com.vividsolutions.jts.geom.LineString; import java.io.Serializable; import java.util.*; import java.util.stream.Collectors; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class Pattern extends Entity { public static final long serialVersionUID = 1L; // A unique ID for this jouney pattern / stop pattern public String pattern_id; // The segment of the pattern's geometry (which is always a LineString) on which each stop in the sequence falls. public int[] segmentIndex; // The percentage in [0..1] along the line segment at which each stop in the sequence falls. public double[] segmentFraction; public List<String> orderedStops; // TODO: change list of trips to set public List<String> associatedTrips; // TODO: add set of shapes // public Set<String> associatedShapes; public LineString geometry; public String name; public String route_id; public static Joiner joiner = Joiner.on("-").skipNulls(); public String feed_id; // TODO: Should a Pattern be generated for a single trip or a set of trips that share the same ordered stop list? /** * * @param orderedStops * @param trips the first trip will serve as an exemplar for all the others. * @param patternGeometry */ public Pattern (List<String> orderedStops, Collection<Trip> trips, LineString patternGeometry){ // Temporarily make a random ID for the pattern, which might be overwritten in a later step ? this.pattern_id = UUID.randomUUID().toString(); // Assign ordered list of stop IDs to be the key of this pattern. // FIXME what about pickup / dropoff type? this.orderedStops = orderedStops; // Save the string IDs of the trips on this pattern. this.associatedTrips = trips.stream().map(t -> t.trip_id).collect(Collectors.toList()); // In theory all trips could take different paths and be on different routes. // Here we're using only the first one as an exemplar. 
String trip_id = associatedTrips.get(0); Trip exemplarTrip = trips.iterator().next(); this.geometry = patternGeometry; // feed.getTripGeometry(exemplarTrip.trip_id); // Patterns have one and only one route. // FIXME are we certain we're only passing in trips on one route? or are we losing information here? this.route_id = exemplarTrip.route_id; // A name is assigned to this pattern based on the headsign, short name, direction ID or stop IDs. // This is not at all guaranteed to be unique, it's just to help identify the pattern. if (exemplarTrip.trip_headsign != null){ name = exemplarTrip.trip_headsign; } else if (exemplarTrip.trip_short_name != null) { name = exemplarTrip.trip_short_name; } else if (exemplarTrip.direction_id >= 0){ name = String.valueOf(exemplarTrip.direction_id); } else{ name = joiner.join(orderedStops); } // TODO: Implement segmentIndex using JTS to segment out LineString by stops. // TODO: Implement segmentFraction using JTS to segment out LineString by stops. } }
package com.gamingmesh.jobs.Signs;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.bukkit.Bukkit;
import org.bukkit.Location;
import org.bukkit.World;
import org.bukkit.block.Block;
import org.bukkit.block.BlockFace;
import org.bukkit.block.Skull;
import org.bukkit.configuration.ConfigurationSection;
import org.bukkit.configuration.file.YamlConfiguration;

import com.gamingmesh.jobs.Jobs;
import com.gamingmesh.jobs.config.CommentedYamlConfiguration;
import com.gamingmesh.jobs.container.TopList;

/**
 * Manages Jobs top-list signs: loads and saves their definitions from
 * Signs.yml, refreshes the in-world sign text with current top-list data, and
 * updates the player-head block attached to a sign.
 */
public class SignUtil {

    // In-memory registry of all known top-list signs.
    public SignInfo Signs = new SignInfo();
    private Jobs plugin;

    public SignUtil(Jobs plugin) {
	this.plugin = plugin;
    }

    public SignInfo getSigns() {
	return Signs;
    }

    // Sign file
    /**
     * Reloads the sign registry from plugins/&lt;data&gt;/Signs.yml, replacing
     * any previously loaded entries.
     */
    public void LoadSigns() {
	Signs.GetAllSigns().clear();
	File file = new File(plugin.getDataFolder(), "Signs.yml");
	YamlConfiguration f = YamlConfiguration.loadConfiguration(file);
	if (!f.isConfigurationSection("Signs"))
	    return;
	ConfigurationSection ConfCategory = f.getConfigurationSection("Signs");
	ArrayList<String> categoriesList = new ArrayList<String>(ConfCategory.getKeys(false));
	if (categoriesList.size() == 0)
	    return;
	for (String category : categoriesList) {
	    ConfigurationSection NameSection = ConfCategory.getConfigurationSection(category);
	    com.gamingmesh.jobs.Signs.Sign newTemp = new com.gamingmesh.jobs.Signs.Sign();
	    // Each top-level key under "Signs" is the sign's numeric category.
	    newTemp.setCategory(Integer.valueOf(category));
	    newTemp.setWorld(NameSection.getString("World"));
	    newTemp.setX(NameSection.getDouble("X"));
	    newTemp.setY(NameSection.getDouble("Y"));
	    newTemp.setZ(NameSection.getDouble("Z"));
	    newTemp.setNumber(NameSection.getInt("Number"));
	    newTemp.setJobName(NameSection.getString("JobName"));
	    newTemp.setSpecial(NameSection.getBoolean("Special"));
	    Signs.addSign(newTemp);
	}
	return;
    }

    // Signs save file
    /** Writes the current sign registry back to Signs.yml. */
    public void saveSigns() {
	File f = new File(plugin.getDataFolder(), "Signs.yml");
	YamlConfiguration conf = YamlConfiguration.loadConfiguration(f);
	CommentedYamlConfiguration writer = new CommentedYamlConfiguration();
	conf.options().copyDefaults(true);
	writer.addComment("Signs", "DO NOT EDIT THIS FILE BY HAND!");
	if (!conf.isConfigurationSection("Signs"))
	    conf.createSection("Signs");
	for (com.gamingmesh.jobs.Signs.Sign one : Signs.GetAllSigns()) {
	    String path = "Signs." + String.valueOf(one.GetCategory());
	    writer.set(path + ".World", one.GetWorld());
	    writer.set(path + ".X", one.GetX());
	    writer.set(path + ".Y", one.GetY());
	    writer.set(path + ".Z", one.GetZ());
	    writer.set(path + ".Number", one.GetNumber());
	    writer.set(path + ".JobName", one.GetJobName());
	    writer.set(path + ".Special", one.isSpecial());
	}
	try {
	    writer.save(f);
	} catch (IOException e) {
	    e.printStackTrace();
	}
	return;
    }

    /**
     * Refreshes every sign registered for the given job (or the global top list
     * when the job name is "gtoplist") with fresh top-list data. Signs whose
     * block is no longer a sign are dropped from the registry and the file is
     * re-saved. Head updates are staggered via an increasing delay (timelapse).
     *
     * @param JobName job whose signs should be refreshed, or "gtoplist".
     * @return always true.
     */
    public boolean SignUpdate(String JobName) {
	// Iterate over a snapshot so stale entries can be removed from the live list.
	List<com.gamingmesh.jobs.Signs.Sign> Copy = new ArrayList<com.gamingmesh.jobs.Signs.Sign>(Signs.GetAllSigns().size());
	for (com.gamingmesh.jobs.Signs.Sign foo : Signs.GetAllSigns()) {
	    Copy.add(foo);
	}
	int timelapse = 1;
	for (com.gamingmesh.jobs.Signs.Sign one : Copy) {
	    String SignJobName = one.GetJobName();
	    if (JobName.equalsIgnoreCase(SignJobName)) {
		String SignsWorld = one.GetWorld();
		double SignsX = one.GetX();
		double SignsY = one.GetY();
		double SignsZ = one.GetZ();
		// Stored Number is 1-based; the DAO offset is 0-based.
		int number = one.GetNumber() - 1;
		List<TopList> PlayerList = new ArrayList<TopList>();
		if (!JobName.equalsIgnoreCase("gtoplist")) {
		    PlayerList = Jobs.getJobsDAO().toplist(SignJobName, number);
		} else {
		    PlayerList = Jobs.getJobsDAO().getGlobalTopList(number);
		}
		if (PlayerList.size() != 0) {
		    World world = Bukkit.getWorld(SignsWorld);
		    if (world == null)
			continue;
		    Location nloc = new Location(world, SignsX, SignsY, SignsZ);
		    Block block = nloc.getBlock();
		    if (!(block.getState() instanceof org.bukkit.block.Sign)) {
			// The sign block no longer exists: forget it and persist.
			Signs.GetAllSigns().remove(one);
			saveSigns();
		    } else {
			org.bukkit.block.Sign sign = (org.bukkit.block.Sign) block.getState();
			if (!one.isSpecial()) {
			    // Regular sign: up to 4 ranked entries, one per line.
			    for (int i = 0; i < 4; i++) {
				if (i >= PlayerList.size()) {
				    break;
				}
				String PlayerName = PlayerList.get(i).getPlayerName();
				// Truncate long names to 7 chars plus a "~" marker.
				if (PlayerName != null && PlayerName.length() > 8) {
				    String PlayerNameStrip = PlayerName.split("(?<=\\G.{7})")[0];
				    PlayerName = PlayerNameStrip + "~";
				}
				if (PlayerName == null)
				    PlayerName = "Unknown";
				String line = Jobs.getLanguage().getMessage("signs.List");
				line = line.replace("[number]", String.valueOf(i + number + 1));
				line = line.replace("[player]", PlayerName);
				line = line.replace("[level]", String.valueOf(PlayerList.get(i).getLevel()));
				sign.setLine(i, line);
			    }
			    sign.update();
			    UpdateHead(sign, PlayerList.get(0).getPlayerName(), timelapse);
			} else {
			    // Special sign: all 4 lines describe the single top entry,
			    // using the per-number templates signs.SpecialList.<n>.1..4.
			    String PlayerName = PlayerList.get(0).getPlayerName();
			    if (PlayerName.length() > 8) {
				String PlayerNameStrip = PlayerName.split("(?<=\\G.{7})")[0];
				PlayerName = PlayerNameStrip + "~";
			    }
			    String line1 = Jobs.getLanguage().getMessage("signs.SpecialList." + one.GetNumber() + ".1");
			    line1 = line1.replace("[number]", String.valueOf(one.GetNumber() + number + 1));
			    line1 = line1.replace("[player]", PlayerName);
			    line1 = line1.replace("[level]", String.valueOf(PlayerList.get(0).getLevel()));
			    line1 = line1.replace("[job]", JobName);
			    sign.setLine(0, line1);
			    line1 = Jobs.getLanguage().getMessage("signs.SpecialList." + one.GetNumber() + ".2");
			    line1 = line1.replace("[number]", String.valueOf(one.GetNumber() + number + 1));
			    line1 = line1.replace("[player]", PlayerName);
			    line1 = line1.replace("[level]", String.valueOf(PlayerList.get(0).getLevel()));
			    line1 = line1.replace("[job]", JobName);
			    sign.setLine(1, line1);
			    line1 = Jobs.getLanguage().getMessage("signs.SpecialList." + one.GetNumber() + ".3");
			    line1 = line1.replace("[number]", String.valueOf(one.GetNumber() + number + 1));
			    line1 = line1.replace("[player]", PlayerName);
			    line1 = line1.replace("[level]", String.valueOf(PlayerList.get(0).getLevel()));
			    line1 = line1.replace("[job]", JobName);
			    sign.setLine(2, line1);
			    line1 = Jobs.getLanguage().getMessage("signs.SpecialList." + one.GetNumber() + ".4");
			    line1 = line1.replace("[number]", String.valueOf(one.GetNumber() + number + 1));
			    line1 = line1.replace("[player]", PlayerName);
			    line1 = line1.replace("[level]", String.valueOf(PlayerList.get(0).getLevel()));
			    line1 = line1.replace("[job]", JobName);
			    sign.setLine(3, line1);
			    sign.update();
			    UpdateHead(sign, PlayerList.get(0).getPlayerName(), timelapse);
			}
			timelapse++;
		    }
		}
	    }
	}
	return true;
    }

    /**
     * Schedules an update of the player-head block attached to the sign (above
     * it, or behind it relative to the sign's facing) so it shows the given
     * player's head. Runs on the main thread after a delay proportional to
     * {@code timelapse}.
     */
    public void UpdateHead(final org.bukkit.block.Sign sign, final String Playername, final int timelapse) {
	Bukkit.getServer().getScheduler().scheduleSyncDelayedTask(plugin, new Runnable() {
	    @Override
	    @SuppressWarnings("deprecation")
	    public void run() {
		org.bukkit.material.Sign signMat = (org.bukkit.material.Sign) sign.getData();
		BlockFace directionFacing = signMat.getFacing();
		Location loc = sign.getLocation().clone();
		// First candidate: the block directly above the sign.
		loc.add(0, 1, 0);
		if (Playername == null)
		    return;
		Block block = loc.getBlock();
		// Second candidate: one block behind (opposite the sign's facing).
		if (block == null || !(block.getState() instanceof Skull))
		    loc.add(directionFacing.getOppositeFace().getModX(), 0, directionFacing.getOppositeFace().getModZ());
		block = loc.getBlock();
		if (block == null || !(block.getState() instanceof Skull))
		    return;
		Skull skull = (Skull) block.getState();
		if (skull == null)
		    return;
		skull.setOwner(Playername);
		skull.update();
		return;
	    }
	    // Delay is timelapse * InfoUpdateInterval seconds (20 ticks/second).
	}, timelapse * Jobs.getGCManager().InfoUpdateInterval * 20L);
    }
}
package endpoint.tools; import java.io.IOException; import java.io.PrintWriter; import java.util.List; import javax.servlet.ServletException; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import com.google.appengine.api.users.UserServiceFactory; public class ReIndexServlet extends HttpServlet { private static final long serialVersionUID = -3346681549334024512L; @Override protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException { if (!UserServiceFactory.getUserService().isUserAdmin()) { forbidden(resp); return; } resp.setContentType("text/plain"); execute(getPath(req), resp.getWriter()); } private void execute(String path, PrintWriter writer) { List<Long> ids = ReIndex.parse(path).now(); writer.print("Total re-indexed entities: " + ids.size()); } private String getPath(HttpServletRequest req) { return req.getRequestURI().substring(req.getServletPath().length()); } private void forbidden(HttpServletResponse resp) { resp.setStatus(403); return; } }
package com.dnmaze.dncli; import com.dnmaze.dncli.command.Command; import com.dnmaze.dncli.command.CommandDds; import com.dnmaze.dncli.command.CommandDnt; import com.dnmaze.dncli.command.CommandDnt.Execute; import com.dnmaze.dncli.command.CommandDnt.Process; import com.dnmaze.dncli.command.CommandPak; import com.dnmaze.dncli.command.CommandPak.Compress; import com.dnmaze.dncli.command.CommandPak.Detail; import com.dnmaze.dncli.command.CommandPak.Extract; import com.dnmaze.dncli.command.CommandPak.Inflate; import com.dnmaze.dncli.command.CommandPatch; import com.dnmaze.dncli.exception.InvalidDdsOutputFormatException; import com.beust.jcommander.JCommander; import com.beust.jcommander.ParameterException; import lombok.SneakyThrows; import java.util.Map; public class CliApplication { /** * <p>The main program.</p> * * @param args the args */ @SneakyThrows public static void main(String[] args) { // register H2 Class.forName("org.h2.Driver"); Command command = new Command(); JCommander jc = new JCommander(command); jc.setProgramName("dncli"); // pak command setup CommandPak pak = command.getPak(); JCommander pakJc = addCommand(jc, "pak", pak); // pak subcommand setup pakJc.addCommand("compress", pak.getCompress()); pakJc.addCommand("extract", pak.getExtract()); pakJc.addCommand("inflate", pak.getInflate()); pakJc.addCommand("list", pak.getDetail()); // dnt command setup CommandDnt dnt = command.getDnt(); JCommander dntJc = addCommand(jc, "dnt", dnt); //dnt subcommand setup dntJc.addCommand("process", dnt.getProcess()); dntJc.addCommand("execute", dnt.getExecute()); // dds command setup CommandDds dds = command.getDds(); jc.addCommand("dds", dds); // patch command setup CommandPatch patch = command.getPatch(); jc.addCommand("patch", patch); // parse args and set the params! 
try { jc.parse(args); } catch (ParameterException | InvalidDdsOutputFormatException ex) { System.out.println(ex.getMessage()); System.exit(1); } String parsedCommand = jc.getParsedCommand(); // if no command or help was specified, show it! if (parsedCommand == null || command.isHelp()) { jc.usage(); System.exit(0); } // find out what command is being used try { switch (parsedCommand) { case "pak": String pakCommand = pakJc.getParsedCommand(); if (pakCommand == null || pak.isHelp()) { jc.usage("pak"); System.exit(1); } switch (pakCommand) { case "compress": Compress compress = pak.getCompress(); if (compress.isHelp()) { pakJc.usage("compress"); System.exit(1); } compress.run(); break; case "extract": Extract extract = pak.getExtract(); if (extract.isHelp()) { pakJc.usage("extract"); System.exit(1); } extract.run(); break; case "inflate": Inflate inflate = pak.getInflate(); if (inflate.isHelp()) { pakJc.usage("inflate"); System.exit(1); } inflate.run(); break; case "list": Detail detail = pak.getDetail(); if (detail.isHelp()) { pakJc.usage("list"); System.exit(1); } detail.run(); break; default: throw new UnsupportedOperationException("Unknown pak command: '" + pakCommand + "'"); } break; case "dnt": String dntCommand = dntJc.getParsedCommand(); if (dntCommand == null || dnt.isHelp()) { jc.usage("dds"); System.exit(1); } switch (dntCommand) { case "process": Process process = dnt.getProcess(); if (process.isHelp()) { dntJc.usage("process"); System.exit(1); } dnt.getProcess().run(); break; case "execute": Execute execute = dnt.getExecute(); if (execute.isHelp()) { dntJc.usage("execute"); System.exit(1); } execute.run(); break; default: throw new UnsupportedOperationException("Unknown dnt command: '" + dntCommand + "'"); } break; case "dds": if (dds.isHelp()) { jc.usage("dds"); System.exit(1); } dds.run(); break; case "patch": if (patch.isHelp()) { jc.usage("patch"); System.exit(1); } patch.run(); break; default: throw new UnsupportedOperationException("Unknown 
command: '" + parsedCommand + "'"); } System.exit(0); } catch (Throwable th) { System.err.println(th.getMessage()); System.exit(1); } } private static JCommander addCommand(JCommander jcommander, String name, Object object) { jcommander.addCommand(name, object); Map<String, JCommander> commands = jcommander.getCommands(); return commands.get(name); } }
package com.github.NewsBotIRC;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;

/**
 * Shortens URLs via the TinyURL HTTP API. On any failure (network error,
 * empty response) the original URL is returned unchanged.
 */
public class UrlShortener
{
    /**
     * Returns a tinyurl.com short link for the given URL, or the input URL
     * itself if the service cannot be reached or returns nothing.
     *
     * @param myUrl the URL to shorten.
     * @return the shortened URL, or {@code myUrl} on failure.
     */
    public static String shortenUrl(String myUrl)
    {
        String shortenedUrl = myUrl;
        try {
            String encoded = URLEncoder.encode(myUrl, "UTF-8");
            URL url = new URL("https://tinyurl.com/api-create.php?url=" + encoded);
            HttpURLConnection http = (HttpURLConnection) url.openConnection();
            // FIX: the reader was never closed and the connection never released;
            // also read with an explicit charset instead of the platform default.
            try (BufferedReader reader = new BufferedReader(
                    new InputStreamReader(http.getInputStream(), StandardCharsets.UTF_8))) {
                String line = reader.readLine();
                // Guard against an empty response body replacing the URL with null.
                if (line != null) {
                    shortenedUrl = line;
                }
            } finally {
                http.disconnect();
            }
        } catch (IOException e) {
            System.out.println(e.getMessage());
        }
        return shortenedUrl;
    }
}
package com.dnw.depmap.neo;

import org.eclipse.jdt.core.dom.IBinding;

import com.dnw.plugin.util.WeakCache;

/**
 * Class/Interface BindingCache. A static, weakly-referenced string cache keyed
 * by a binding's key string, so no {@link IBinding} instance is ever retained.
 *
 * @author manbaum
 * @since Oct 18, 2014
 */
public final class BindingCache {

	/**
	 * According to document of <tt>ASTParser.setResolveBindings()</tt>:<br/>
	 * <blockquote>It is very important to not retain any of these objectes longer than absolutely
	 * necessary.</blockquote>
	 *
	 * @author manbaum
	 * @since Aug 26, 2015
	 */
	public static final WeakCache<String, String> cache = new WeakCache<String, String>();

	/**
	 * Method put. Caches a value under the binding's key string.
	 *
	 * @author manbaum
	 * @since Oct 18, 2014
	 * @param key the binding whose key string identifies the entry.
	 * @param value the value to associate with the binding.
	 */
	public static final void put(IBinding key, String value) {
		cache.put(key.getKey(), value);
	}

	/**
	 * Method remove. Drops the entry cached for the binding, if any.
	 *
	 * @author manbaum
	 * @since Oct 18, 2014
	 * @param key the binding whose entry should be removed.
	 */
	public static final void remove(IBinding key) {
		cache.remove(key.getKey());
	}

	/**
	 * Method contains. Tells whether an entry is cached for the binding.
	 *
	 * @author manbaum
	 * @since Oct 18, 2014
	 * @param key the binding to look up.
	 * @return true if the cache holds an entry for the binding's key string.
	 */
	public static final boolean contains(IBinding key) {
		return cache.contains(key.getKey());
	}

	/**
	 * Method get. Looks up the value cached for the binding.
	 *
	 * @author manbaum
	 * @since Oct 18, 2014
	 * @param key the binding to look up.
	 * @return the cached value, or whatever the cache reports for a missing key.
	 */
	public static final String get(IBinding key) {
		return cache.get(key.getKey());
	}

	/**
	 * Method clear. Empties the whole cache.
	 *
	 * @author manbaum
	 * @since Oct 18, 2014
	 */
	public static final void clear() {
		cache.clear();
	}
}
package com.easternedgerobotics.rov;

import com.easternedgerobotics.rov.control.SixThrusterConfig;
import com.easternedgerobotics.rov.event.BroadcastEventPublisher;
import com.easternedgerobotics.rov.event.EventPublisher;
import com.easternedgerobotics.rov.io.ADC;
import com.easternedgerobotics.rov.io.CpuInformation;
import com.easternedgerobotics.rov.io.CurrentSensor;
import com.easternedgerobotics.rov.io.LM35;
import com.easternedgerobotics.rov.io.Light;
import com.easternedgerobotics.rov.io.MPX4250AP;
import com.easternedgerobotics.rov.io.Motor;
import com.easternedgerobotics.rov.io.PWM;
import com.easternedgerobotics.rov.io.TMP36;
import com.easternedgerobotics.rov.io.Thruster;
import com.easternedgerobotics.rov.io.VoltageSensor;
import com.easternedgerobotics.rov.io.pololu.Maestro;
import com.easternedgerobotics.rov.math.Range;
import com.easternedgerobotics.rov.value.CameraSpeedValueA;
import com.easternedgerobotics.rov.value.CameraSpeedValueB;
import com.easternedgerobotics.rov.value.ExternalPressureValueA;
import com.easternedgerobotics.rov.value.ExternalPressureValueB;
import com.easternedgerobotics.rov.value.ExternalTemperatureValue;
import com.easternedgerobotics.rov.value.HeartbeatValue;
import com.easternedgerobotics.rov.value.InternalPressureValue;
import com.easternedgerobotics.rov.value.InternalTemperatureValue;
import com.easternedgerobotics.rov.value.LightSpeedValue;
import com.easternedgerobotics.rov.value.PortAftSpeedValue;
import com.easternedgerobotics.rov.value.PortForeSpeedValue;
import com.easternedgerobotics.rov.value.PortVertSpeedValue;
import com.easternedgerobotics.rov.value.SpeedValue;
import com.easternedgerobotics.rov.value.StarboardAftSpeedValue;
import com.easternedgerobotics.rov.value.StarboardForeSpeedValue;
import com.easternedgerobotics.rov.value.StarboardVertSpeedValue;
import com.easternedgerobotics.rov.value.ToolingSpeedValue;
import com.pi4j.io.serial.Serial;
import com.pi4j.io.serial.SerialFactory;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.pmw.tinylog.Logger;
import rx.Observable;
import rx.Scheduler;
import rx.broadcast.BasicOrder;
import rx.broadcast.UdpBroadcast;
import rx.schedulers.Schedulers;
import rx.subjects.PublishSubject;
import rx.subjects.Subject;

import java.io.IOException;
import java.net.DatagramSocket;
import java.net.InetAddress;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;

/**
 * Entry point and hardware wiring for the ROV side of the system: maps Maestro
 * PWM/ADC channels to thrusters, motors, lights and sensors, and drives them
 * from heartbeat events received over UDP broadcast.
 */
final class Rov {
    /** Seconds without a heartbeat before the ROV is considered disconnected. */
    static final long MAX_HEARTBEAT_GAP = 5;

    static final long CPU_POLL_INTERVAL = 1;

    /** Period (ms) of the control tick that latches the newest heartbeat. */
    static final long SLEEP_DURATION = 100;

    static final byte MAESTRO_DEVICE_NUMBER = 0x01;

    // Maestro channel assignments. NOTE(review): these constants are used both
    // as channel numbers and as indices into the channels list passed to the
    // constructor, so the list must be indexed identically to the hardware.
    static final byte PORT_AFT_CHANNEL = 15;

    static final byte STARBOARD_AFT_CHANNEL = 12;

    static final byte PORT_FORE_CHANNEL = 16;

    static final byte STARBOARD_FORE_CHANNEL = 13;

    static final byte PORT_VERT_CHANNEL = 17;

    static final byte STARBOARD_VERT_CHANNEL = 14;

    static final byte CAMERA_A_MOTOR_CHANNEL = 18;

    static final byte CAMERA_B_MOTOR_CHANNEL = 19;

    static final byte TOOLING_MOTOR_CHANNEL = 22;

    static final byte LIGHT_CHANNEL = 23;

    static final byte INTERNAL_TEMPERATURE_SENSOR_CHANNEL = 1;

    static final byte EXTERNAL_TEMPERATURE_SENSOR_CHANNEL = 3;

    static final byte INTERNAL_PRESSURE_SENSOR_CHANNEL = 2;

    static final byte EXTERNAL_PRESSURE_SENSOR_A_CHANNEL = 4;

    static final byte EXTERNAL_PRESSURE_SENSOR_B_CHANNEL = 5;

    static final byte VOLTAGE_SENSOR_05V_CHANNEL = 8;

    static final byte VOLTAGE_SENSOR_12V_CHANNEL = 7;

    static final byte VOLTAGE_SENSOR_48V_CHANNEL = 6;

    static final byte CURRENT_SENSOR_05V_CHANNEL = 11;

    static final byte CURRENT_SENSOR_12V_CHANNEL = 10;

    static final byte CURRENT_SENSOR_48V_CHANNEL = 9;

    private final LM35 internalTemperatureSensor;

    private final TMP36 externalTemperatureSensor;

    private final MPX4250AP internalPressureSensor;

    private final MPX4250AP externalPressureSensorA;

    private final MPX4250AP externalPressureSensorB;

    private final SixThrusterConfig thrusterConfig;

    private final List<Thruster> thrusters;

    private final List<Motor> motors;

    private final List<Light> lights;

    private final List<VoltageSensor> voltageSensors;

    private final List<CurrentSensor> currentSensors;

    private final EventPublisher eventPublisher;

    /** Set once the beat subscription terminates; shutdown() waits on this. */
    private final AtomicBoolean dead = new AtomicBoolean();

    /** Completing this subject ends the beat subscription. */
    private final Subject<Void, Void> killSwitch = PublishSubject.create();

    /**
     * Wires every actuator and sensor to its Maestro channel.
     *
     * @param eventPublisher the bus carrying speed/heartbeat values
     * @param channels channel list indexed by the *_CHANNEL constants above
     */
    <T extends ADC & PWM> Rov(
        final EventPublisher eventPublisher,
        final List<T> channels
    ) {
        this.eventPublisher = eventPublisher;

        final PortAftSpeedValue portAft = new PortAftSpeedValue();
        final StarboardAftSpeedValue starboardAft = new StarboardAftSpeedValue();
        final PortForeSpeedValue portFore = new PortForeSpeedValue();
        final StarboardForeSpeedValue starboardFore = new StarboardForeSpeedValue();
        final PortVertSpeedValue portVert = new PortVertSpeedValue();
        final StarboardVertSpeedValue starboardVert = new StarboardVertSpeedValue();

        this.thrusterConfig = new SixThrusterConfig(eventPublisher);

        this.motors = Collections.unmodifiableList(Arrays.asList(
            new Motor(
                eventPublisher
                    .valuesOfType(CameraSpeedValueA.class)
                    .startWith(new CameraSpeedValueA())
                    .cast(SpeedValue.class),
                channels.get(CAMERA_A_MOTOR_CHANNEL).setOutputRange(new Range(Motor.MAX_REV, Motor.MAX_FWD))),
            new Motor(
                eventPublisher
                    .valuesOfType(CameraSpeedValueB.class)
                    .startWith(new CameraSpeedValueB())
                    .cast(SpeedValue.class),
                channels.get(CAMERA_B_MOTOR_CHANNEL).setOutputRange(new Range(Motor.MAX_REV, Motor.MAX_FWD))),
            new Motor(
                eventPublisher
                    .valuesOfType(ToolingSpeedValue.class)
                    .startWith(new ToolingSpeedValue())
                    .cast(SpeedValue.class),
                channels.get(TOOLING_MOTOR_CHANNEL).setOutputRange(new Range(Motor.MAX_REV, Motor.MAX_FWD)))
        ));

        // Starboard-side and vertical thrusters use a reversed output range
        // (MAX_FWD, MAX_REV) — presumably to account for prop rotation; do not
        // "normalize" these without checking the hardware.
        this.thrusters = Collections.unmodifiableList(Arrays.asList(
            new Thruster(
                eventPublisher
                    .valuesOfType(PortAftSpeedValue.class)
                    .startWith(portAft)
                    .cast(SpeedValue.class),
                channels.get(PORT_AFT_CHANNEL).setOutputRange(new Range(Thruster.MAX_REV, Thruster.MAX_FWD))),
            new Thruster(
                eventPublisher
                    .valuesOfType(StarboardAftSpeedValue.class)
                    .startWith(starboardAft)
                    .cast(SpeedValue.class),
                channels.get(STARBOARD_AFT_CHANNEL).setOutputRange(new Range(Thruster.MAX_FWD, Thruster.MAX_REV))),
            new Thruster(
                eventPublisher
                    .valuesOfType(PortForeSpeedValue.class)
                    .startWith(portFore)
                    .cast(SpeedValue.class),
                channels.get(PORT_FORE_CHANNEL).setOutputRange(new Range(Thruster.MAX_REV, Thruster.MAX_FWD))),
            new Thruster(
                eventPublisher
                    .valuesOfType(StarboardForeSpeedValue.class)
                    .startWith(starboardFore)
                    .cast(SpeedValue.class),
                channels.get(STARBOARD_FORE_CHANNEL).setOutputRange(new Range(Thruster.MAX_FWD, Thruster.MAX_REV))),
            new Thruster(
                eventPublisher
                    .valuesOfType(PortVertSpeedValue.class)
                    .startWith(portVert)
                    .cast(SpeedValue.class),
                channels.get(PORT_VERT_CHANNEL).setOutputRange(new Range(Thruster.MAX_FWD, Thruster.MAX_REV))),
            new Thruster(
                eventPublisher
                    .valuesOfType(StarboardVertSpeedValue.class)
                    .startWith(starboardVert)
                    .cast(SpeedValue.class),
                channels.get(STARBOARD_VERT_CHANNEL).setOutputRange(new Range(Thruster.MAX_FWD, Thruster.MAX_REV)))
        ));

        this.lights = Collections.singletonList(
            new Light(
                eventPublisher
                    .valuesOfType(LightSpeedValue.class)
                    .startWith(new LightSpeedValue())
                    .cast(SpeedValue.class),
                channels.get(LIGHT_CHANNEL).setOutputRange(new Range(Light.MAX_REV, Light.MAX_FWD))
            )
        );

        this.voltageSensors = Collections.unmodifiableList(Arrays.asList(
            VoltageSensor.V05.apply(channels.get(VOLTAGE_SENSOR_05V_CHANNEL)),
            VoltageSensor.V12.apply(channels.get(VOLTAGE_SENSOR_12V_CHANNEL)),
            VoltageSensor.V48.apply(channels.get(VOLTAGE_SENSOR_48V_CHANNEL))
        ));

        this.currentSensors = Collections.unmodifiableList(Arrays.asList(
            CurrentSensor.V05.apply(channels.get(CURRENT_SENSOR_05V_CHANNEL)),
            CurrentSensor.V12.apply(channels.get(CURRENT_SENSOR_12V_CHANNEL)),
            CurrentSensor.V48.apply(channels.get(CURRENT_SENSOR_48V_CHANNEL))
        ));

        this.internalTemperatureSensor = new LM35(
            channels.get(INTERNAL_TEMPERATURE_SENSOR_CHANNEL));
        this.externalTemperatureSensor = new TMP36(
            channels.get(EXTERNAL_TEMPERATURE_SENSOR_CHANNEL));
        this.internalPressureSensor = new MPX4250AP(
            channels.get(INTERNAL_PRESSURE_SENSOR_CHANNEL));
        this.externalPressureSensorA = new MPX4250AP(
            channels.get(EXTERNAL_PRESSURE_SENSOR_A_CHANNEL));
        this.externalPressureSensorB = new MPX4250AP(
            channels.get(EXTERNAL_PRESSURE_SENSOR_B_CHANNEL));
    }

    /**
     * Stops the control loop and zeroes every actuator. Blocks until the beat
     * subscription has actually terminated so no write races the zeroing.
     */
    void shutdown() {
        Logger.info("Shutting down");
        killSwitch.onCompleted();
        while (!dead.get()) {
            // Previously a hot spin (while(true){if(dead.get())break;})
            // that pegged a core; yield while waiting for the beat loop to end.
            Thread.yield();
        }
        motors.forEach(Motor::writeZero);
        lights.forEach(Light::writeZero);
        thrusters.forEach(Thruster::writeZero);
    }

    /**
     * Initialises the ROV, attaching the hardware updates to their event source. The ROV will "timeout"
     * if communication with the topside is lost or the received heartbeat value indicates a non-operational
     * status and will shutdown.
     * @param io the scheduler to use for device I/O
     * @param clock the scheduler to use for timing
     */
    void init(final Scheduler io, final Scheduler clock) {
        Logger.debug("Wiring up heartbeat, timeout, and thruster updates");
        // A synthetic non-operational heartbeat emitted after MAX_HEARTBEAT_GAP
        // seconds of silence; restarted (via repeat) every time a real
        // heartbeat arrives.
        final Observable<HeartbeatValue> timeout = Observable.just(new HeartbeatValue(false))
            .delay(MAX_HEARTBEAT_GAP, TimeUnit.SECONDS, clock)
            .doOnNext(heartbeat -> Logger.warn("Timeout while waiting for heartbeat"))
            .concatWith(Observable.never());
        final Observable<HeartbeatValue> heartbeats = eventPublisher.valuesOfType(HeartbeatValue.class);
        final CpuInformation cpuInformation = new CpuInformation(CPU_POLL_INTERVAL, TimeUnit.SECONDS);

        thrusters.forEach(Thruster::writeZero);

        cpuInformation.observe().subscribe(eventPublisher::emit, Logger::warn);

        Observable.interval(SLEEP_DURATION, TimeUnit.MILLISECONDS, clock)
            .withLatestFrom(
                heartbeats.mergeWith(timeout.takeUntil(heartbeats).repeat()),
                (tick, heartbeat) -> heartbeat)
            .observeOn(io)
            .takeUntil(killSwitch)
            .subscribe(
                this::beat,
                throwable -> {
                    // Bug fix: the old handler was RuntimeException::new, which
                    // constructed an exception and discarded it — errors were
                    // silently swallowed and, because onCompleted never ran,
                    // dead was never set and shutdown() spun forever. Log the
                    // failure and release shutdown().
                    Logger.error(throwable);
                    dead.set(true);
                },
                () -> dead.set(true));
    }

    /** Applies the six-thruster mixing config and writes all thrusters. */
    private void thrustersUpdate() {
        thrusterConfig.update();
        thrusters.forEach(Thruster::write);
    }

    /** Zeroes the thruster config and writes zero to all thrusters. */
    private void softShutdown() {
        thrusterConfig.updateZero();
        thrusters.forEach(Thruster::writeZero);
    }

    /**
     * One control tick: drive actuators according to the latest heartbeat
     * (zeroing them when non-operational) and publish all sensor readings.
     *
     * @param heartbeat the most recently received (or synthetic timeout) heartbeat
     */
    private void beat(final HeartbeatValue heartbeat) {
        if (heartbeat.getOperational()) {
            thrustersUpdate();
            lights.forEach(Light::write);
            motors.forEach(Motor::write);
        } else {
            softShutdown();
            lights.forEach(Light::flash);
            motors.forEach(Motor::writeZero);
        }
        voltageSensors.forEach(sensor -> eventPublisher.emit(sensor.read()));
        currentSensors.forEach(sensor -> eventPublisher.emit(sensor.read()));
        eventPublisher.emit(new InternalTemperatureValue(internalTemperatureSensor.read()));
        eventPublisher.emit(new ExternalTemperatureValue(externalTemperatureSensor.read()));
        eventPublisher.emit(new InternalPressureValue(internalPressureSensor.read()));
        eventPublisher.emit(new ExternalPressureValueA(externalPressureSensorA.read()));
        eventPublisher.emit(new ExternalPressureValueB(externalPressureSensorB.read()));
    }

    /**
     * Parses command line options, opens the broadcast socket and serial port,
     * and starts the ROV control loop.
     *
     * @param args see the -b/-s/-r options below
     */
    public static void main(final String[] args) throws InterruptedException, IOException {
        final String app = "rov";
        final HelpFormatter formatter = new HelpFormatter();
        final Option broadcast = Option.builder("b")
            .longOpt("broadcast")
            .hasArg()
            .argName("ADDRESS")
            .desc("use ADDRESS to broadcast messages")
            .required()
            .build();
        final Option serialPort = Option.builder("s")
            .longOpt("serial-port")
            .hasArg()
            .argName("FILE")
            .desc("read and write to FILE as serial device")
            .required()
            .build();
        final Option baudRate = Option.builder("r")
            .type(Integer.class)
            .longOpt("baud-rate")
            .hasArg()
            .argName("BPS")
            .desc("the baud rate to use")
            .required()
            .build();

        final Options options = new Options();
        options.addOption(broadcast);
        options.addOption(serialPort);
        options.addOption(baudRate);

        try {
            final CommandLineParser parser = new DefaultParser();
            final CommandLine arguments = parser.parse(options, args);

            final InetAddress broadcastAddress = InetAddress.getByName(arguments.getOptionValue("b"));
            final int broadcastPort = BroadcastEventPublisher.DEFAULT_BROADCAST_PORT;
            // Socket and serial port intentionally live for the lifetime of
            // the process; the shutdown hook only zeroes the actuators.
            final DatagramSocket socket = new DatagramSocket(broadcastPort);
            final EventPublisher eventPublisher = new BroadcastEventPublisher(new UdpBroadcast<>(
                socket, broadcastAddress, broadcastPort, new BasicOrder<>()));
            final Serial serial = SerialFactory.createInstance();
            final Rov rov = new Rov(eventPublisher, new Maestro<>(serial, MAESTRO_DEVICE_NUMBER));

            Runtime.getRuntime().addShutdownHook(new Thread(rov::shutdown));

            serial.open(arguments.getOptionValue("s"), Integer.parseInt(arguments.getOptionValue("r")));
            rov.init(Schedulers.io(), Schedulers.computation());
            Logger.info("Started");
            eventPublisher.await();
        } catch (final ParseException e) {
            formatter.printHelp(app, options, true);
            System.exit(1);
        }
    }
}
package fredboat.audio; import com.sedmelluq.discord.lavaplayer.player.AudioPlayer; import com.sedmelluq.discord.lavaplayer.player.AudioPlayerManager; import com.sedmelluq.discord.lavaplayer.player.event.AudioEventAdapter; import com.sedmelluq.discord.lavaplayer.track.AudioTrack; import fredboat.audio.queue.AudioTrackProvider; import net.dv8tion.jda.audio.AudioSendHandler; public abstract class AbstractPlayer extends AudioEventAdapter implements AudioSendHandler { private static AudioPlayerManager playerManager; private AudioPlayer player; private AudioTrackProvider audioTrackProvider; @SuppressWarnings("LeakingThisInConstructor") protected AbstractPlayer() { initAudioPlayerManager(); player = new AudioPlayer(playerManager); player.addListener(this); } private static void initAudioPlayerManager() { playerManager = new AudioPlayerManager(); } public void play() { if (player.isPaused()) { player.setPaused(false); } } public void pause() { player.setPaused(true); } public void skip() { player.stopTrack(); } public boolean isPlaying() { return player.getPlayingTrack() != null && !player.isPaused(); } public long getCurrentTimestamp() { return player.getPlayingTrack().getPosition(); } public AudioTrack getPlayingTrack() { return player.getPlayingTrack(); } public void setVolume(float vol) { throw new UnsupportedOperationException("AudioPlayer does not yet support volume control"); } public float getVolume() { return -1f;//Not yet supported } public void setAudioTrackProvider(AudioTrackProvider audioTrackProvider) { this.audioTrackProvider = audioTrackProvider; } @Override public void onTrackEnd(AudioPlayer player, AudioTrack track, boolean interrupted) { if (audioTrackProvider != null) { player.playTrack(audioTrackProvider.provideAudioTrack()); } } @Override public boolean canProvide() { return isPlaying(); } @Override public byte[] provide20MsAudio() { return player.provide().data; } @Override public boolean isOpus() { return true; } }