package org.cytoscape.view.vizmap.gui.internal;

import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.WeakHashMap;

import org.cytoscape.model.CyColumn;
import org.cytoscape.model.CyEdge;
import org.cytoscape.model.CyIdentifiable;
import org.cytoscape.model.CyNetwork;
import org.cytoscape.model.CyNetworkTableManager;
import org.cytoscape.model.CyNode;
import org.cytoscape.model.CyTable;
import org.cytoscape.model.events.ColumnCreatedEvent;
import org.cytoscape.model.events.ColumnCreatedListener;
import org.cytoscape.model.events.ColumnDeletedEvent;
import org.cytoscape.model.events.ColumnDeletedListener;
import org.cytoscape.model.events.NetworkAboutToBeDestroyedEvent;
import org.cytoscape.model.events.NetworkAboutToBeDestroyedListener;
import org.cytoscape.model.events.NetworkAddedEvent;
import org.cytoscape.model.events.NetworkAddedListener;

public class AttributeSetManager implements ColumnDeletedListener, ColumnCreatedListener, NetworkAddedListener,
		NetworkAboutToBeDestroyedListener {

	private static final Set<Class<? extends CyIdentifiable>> GRAPH_OBJECTS;

	static {
		GRAPH_OBJECTS = new HashSet<Class<? extends CyIdentifiable>>();
		GRAPH_OBJECTS.add(CyNode.class);
		GRAPH_OBJECTS.add(CyEdge.class);
		GRAPH_OBJECTS.add(CyNetwork.class);
	}

	private final CyNetworkTableManager tableMgr;

	private final Map<CyNetwork, Map<Class<? extends CyIdentifiable>, AttributeSet>> attrSets;
	private final Map<CyNetwork, Map<Class<? extends CyIdentifiable>, Set<CyTable>>> tableSets;

	public AttributeSetManager(final CyNetworkTableManager tableMgr) {
		this.tableMgr = tableMgr;
		this.attrSets = new WeakHashMap<CyNetwork, Map<Class<? extends CyIdentifiable>, AttributeSet>>();
		this.tableSets = new WeakHashMap<CyNetwork, Map<Class<? extends CyIdentifiable>, Set<CyTable>>>();
	}

	public AttributeSet getAttributeSet(final CyNetwork network, final Class<? extends CyIdentifiable> objectType) {
		if (network == null || objectType == null)
			throw new NullPointerException("Both parameters should not be null.");

		final Map<Class<? extends CyIdentifiable>, AttributeSet> attrSetMap = this.attrSets.get(network);
		if (attrSetMap == null)
			throw new NullPointerException("No such network registered in this manager: " + network);

		return attrSetMap.get(objectType);
	}

	@Override
	public void handleEvent(NetworkAddedEvent e) {
		final CyNetwork network = e.getNetwork();

		final Map<Class<? extends CyIdentifiable>, Set<CyTable>> object2tableMap =
				new HashMap<Class<? extends CyIdentifiable>, Set<CyTable>>();
		final Map<Class<? extends CyIdentifiable>, AttributeSet> attrSetMap =
				new HashMap<Class<? extends CyIdentifiable>, AttributeSet>();

		for (final Class<? extends CyIdentifiable> objectType : GRAPH_OBJECTS) {
			final Map<String, CyTable> tableMap = tableMgr.getTables(network, objectType);
			final Collection<CyTable> tables = tableMap.values();
			object2tableMap.put(objectType, new HashSet<CyTable>(tables));

			final AttributeSet attrSet = new AttributeSet(objectType);
			for (CyTable table : tables) {
				final Collection<CyColumn> columns = table.getColumns();
				for (final CyColumn column : columns) {
					final Class<?> type = column.getType();
					attrSet.getAttrMap().put(column.getName(), type);
				}
			}
			attrSetMap.put(objectType, attrSet);
		}

		this.attrSets.put(network, attrSetMap);
		this.tableSets.put(network, object2tableMap);
	}

	@Override
	public void handleEvent(NetworkAboutToBeDestroyedEvent e) {
		CyNetwork network = e.getNetwork();
		attrSets.remove(network);
		tableSets.remove(network);
	}

	@Override
	public void handleEvent(ColumnCreatedEvent e) {
		final String newAttrName = e.getColumnName();
		final CyTable table = e.getSource();

		for (CyNetwork network : tableSets.keySet()) {
			Map<Class<? extends CyIdentifiable>, Set<CyTable>> tMap = tableSets.get(network);
			for (final Class<? extends CyIdentifiable> objectType : GRAPH_OBJECTS) {
				final Set<CyTable> targetTables = tMap.get(objectType);
				if (!targetTables.contains(table))
					continue;

				this.attrSets.get(network).get(objectType).getAttrMap()
						.put(newAttrName, table.getColumn(newAttrName).getType());
				return;
			}
		}
	}

	@Override
	public void handleEvent(ColumnDeletedEvent e) {
		final CyTable table = e.getSource();

		for (CyNetwork network : tableSets.keySet()) {
			Map<Class<? extends CyIdentifiable>, Set<CyTable>> tMap = tableSets.get(network);
			for (final Class<? extends CyIdentifiable> objectType : GRAPH_OBJECTS) {
				final Set<CyTable> targetTables = tMap.get(objectType);
				if (!targetTables.contains(table))
					continue;

				this.attrSets.get(network).get(objectType).getAttrMap().remove(e.getColumnName());
				return;
			}
		}
	}
}
package grakn.core.server; import grabl.tracing.client.GrablTracing; import grabl.tracing.client.GrablTracingThreadStatic; import grakn.common.concurrent.NamedThreadFactory; import grakn.core.Grakn; import grakn.core.common.exception.GraknException; import grakn.core.concurrent.common.Executors; import grakn.core.rocks.RocksGrakn; import grakn.core.server.migrator.MigratorClient; import grakn.core.server.rpc.GraknRPCService; import grakn.core.server.rpc.MigratorRPCService; import grakn.core.server.util.ServerCommand; import grakn.core.server.util.ServerDefaults; import io.grpc.Server; import io.grpc.netty.NettyServerBuilder; import io.netty.channel.socket.nio.NioServerSocketChannel; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import picocli.CommandLine; import picocli.CommandLine.ParameterException; import picocli.CommandLine.PropertiesDefaultProvider; import picocli.CommandLine.UnmatchedArgumentException; import java.io.FileInputStream; import java.io.IOException; import java.net.BindException; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; import java.time.Duration; import java.time.Instant; import java.util.Map; import java.util.Properties; import java.util.concurrent.TimeUnit; import java.util.logging.Level; import static grakn.core.common.exception.ErrorMessage.Server.ALREADY_RUNNING; import static grakn.core.common.exception.ErrorMessage.Server.DATA_DIRECTORY_NOT_FOUND; import static grakn.core.common.exception.ErrorMessage.Server.DATA_DIRECTORY_NOT_WRITABLE; import static grakn.core.common.exception.ErrorMessage.Server.ENV_VAR_NOT_FOUND; import static grakn.core.common.exception.ErrorMessage.Server.EXITED_WITH_ERROR; import static grakn.core.common.exception.ErrorMessage.Server.FAILED_AT_STOPPING; import static grakn.core.common.exception.ErrorMessage.Server.FAILED_PARSE_PROPERTIES; import static grakn.core.common.exception.ErrorMessage.Server.PROPERTIES_FILE_NOT_FOUND; import static grakn.core.common.exception.ErrorMessage.Server.UNCAUGHT_EXCEPTION; import static grakn.core.server.util.ServerDefaults.ASCII_LOGO_FILE; import static grakn.core.server.util.ServerDefaults.PROPERTIES_FILE; public class GraknServer implements AutoCloseable { private static final Logger LOG = LoggerFactory.getLogger(GraknServer.class); private static final int MAX_THREADS = Runtime.getRuntime().availableProcessors(); private final Grakn grakn; private final Server server; private final ServerCommand.Start command; private final GraknRPCService graknRPCService; private final MigratorRPCService migratorRPCService; private GraknServer(ServerCommand.Start command) throws IOException { this.command = command; configureAndVerifyDataDir(); configureTracing(); if (command.debug()) LOG.info("Running Grakn Core Server in debug mode."); grakn = RocksGrakn.open(command.dataDir()); graknRPCService = new GraknRPCService(grakn); migratorRPCService = new MigratorRPCService(grakn); server = rpcServer(); Thread.setDefaultUncaughtExceptionHandler( (t, e) -> LOG.error(UNCAUGHT_EXCEPTION.message(t.getName() + ": " + e.getMessage()), e) ); Runtime.getRuntime().addShutdownHook( NamedThreadFactory.create(GraknServer.class, "shutdown").newThread(this::close) ); initLoggerConfig(); } private Server rpcServer() { assert Executors.isInitialised(); return NettyServerBuilder.forPort(command.port()) .executor(Executors.mainPool()) .workerEventLoopGroup(Executors.networkPool()) .bossEventLoopGroup(Executors.networkPool()) .maxConnectionIdle(1, TimeUnit.HOURS) // TODO: why 1 
hour? .channelType(NioServerSocketChannel.class) .addService(graknRPCService) .addService(migratorRPCService) .build(); } private void initLoggerConfig() { java.util.logging.Logger.getLogger("io.grpc").setLevel(Level.SEVERE); } private int port() { return command.port(); } private Path dataDir() { return command.dataDir(); } private void configureAndVerifyDataDir() throws IOException { if (!Files.isDirectory(this.command.dataDir())) { if (this.command.dataDir().equals(ServerDefaults.DATA_DIR)) { Files.createDirectory(this.command.dataDir()); } else { throw GraknException.of(DATA_DIRECTORY_NOT_FOUND, this.command.dataDir()); } } if (!Files.isWritable(this.command.dataDir())) { throw GraknException.of(DATA_DIRECTORY_NOT_WRITABLE, this.command.dataDir()); } } private void configureTracing() { if (this.command.grablTrace()) { GrablTracing grablTracingClient; grablTracingClient = GrablTracing.withLogging(GrablTracing.tracing( command.grablURI().toString(), command.grablUsername(), command.grablToken() )); GrablTracingThreadStatic.setGlobalTracingClient(grablTracingClient); LOG.info("Grabl tracing is enabled"); } } private static void printASCIILogo() throws IOException { if (ASCII_LOGO_FILE.exists()) { System.out.println("\n" + new String(Files.readAllBytes(ASCII_LOGO_FILE.toPath()), StandardCharsets.UTF_8)); } } private static Properties parseProperties() { Properties properties = new Properties(); boolean error = false; try { properties.load(new FileInputStream(PROPERTIES_FILE)); } catch (IOException e) { LOG.warn(PROPERTIES_FILE_NOT_FOUND.message(PROPERTIES_FILE.toString())); return new Properties(); } for (Map.Entry<Object, Object> entry : properties.entrySet()) { String val = (String) entry.getValue(); if (val.startsWith("$")) { String envVarName = val.substring(1); if (System.getenv(envVarName) == null) { LOG.error(ENV_VAR_NOT_FOUND.message(val)); error = true; } else { properties.put(entry.getKey(), System.getenv(envVarName)); } } } if (error) throw GraknException.of(FAILED_PARSE_PROPERTIES); else return properties; } private static ServerCommand parseCommandLine(Properties properties, String[] args) { ServerCommand.Start startCommand = new ServerCommand.Start(); ServerCommand.ImportData importDataCommand = new ServerCommand.ImportData(startCommand); ServerCommand.ExportData exportDataCommand = new ServerCommand.ExportData(startCommand); ServerCommand.PrintSchema printSchemaCommand = new ServerCommand.PrintSchema(startCommand); CommandLine commandLine = new CommandLine(startCommand) .addSubcommand(importDataCommand) .addSubcommand(exportDataCommand) .addSubcommand(printSchemaCommand); commandLine.setDefaultValueProvider(new PropertiesDefaultProvider(properties)); try { CommandLine.ParseResult parseResult = commandLine.parseArgs(args); if (commandLine.isUsageHelpRequested()) { commandLine.usage(commandLine.getOut()); return null; } else if (commandLine.isVersionHelpRequested()) { commandLine.printVersionHelp(commandLine.getOut()); return null; } else { if (parseResult.hasSubcommand()) { assert parseResult.subcommand().asCommandLineList().size() == 1; return parseResult.subcommand().asCommandLineList().get(0).getCommand(); } else { assert parseResult.asCommandLineList().size() == 1; return parseResult.asCommandLineList().get(0).getCommand(); } } } catch (ParameterException ex) { commandLine.getErr().println(ex.getMessage()); if (!UnmatchedArgumentException.printSuggestions(ex, commandLine.getErr())) { ex.getCommandLine().usage(commandLine.getErr()); } return null; } } public static void 
main(String[] args) { try { printASCIILogo(); ServerCommand command = parseCommandLine(parseProperties(), args); if (command == null) System.exit(0); if (command.isStart()) { startGraknServer(command.asStart()); } else if (command.isImportData()) { importData(command.asImportData()); } else if (command.isExportData()) { exportData(command.asExportData()); } else if (command.isPrintSchema()) { ServerCommand.PrintSchema printSchemaCommand = command.asPrintSchema(); printSchema(printSchemaCommand); } } catch (Exception e) { if (e instanceof GraknException) { LOG.error(e.getMessage()); } else { LOG.error(e.getMessage(), e); LOG.error(EXITED_WITH_ERROR.message()); } System.exit(1); } System.exit(0); } private static void printSchema(ServerCommand.PrintSchema printSchemaCommand) { MigratorClient migrator = new MigratorClient(printSchemaCommand.port()); migrator.printSchema(printSchemaCommand.database()); } private static void exportData(ServerCommand.ExportData exportDataCommand) { MigratorClient migrator = new MigratorClient(exportDataCommand.port()); boolean success = migrator.exportData(exportDataCommand.database(), exportDataCommand.filename()); System.exit(success ? 0 : 1); } private static void importData(ServerCommand.ImportData importDataCommand) { MigratorClient migrator = new MigratorClient(importDataCommand.port()); boolean success = migrator.importData(importDataCommand.database(), importDataCommand.filename(), importDataCommand.remapLabels()); System.exit(success ? 0 : 1); } private static void startGraknServer(ServerCommand.Start command) throws IOException { Instant start = Instant.now(); GraknServer server = new GraknServer(command); server.start(); Instant end = Instant.now(); LOG.info("- version: {}", Version.VERSION); LOG.info("- listening to port: {}", server.port()); LOG.info("- data directory configured to: {}", server.dataDir()); LOG.info("- bootup completed in: {} ms", Duration.between(start, end).toMillis()); LOG.info(""); LOG.info("Grakn Core Server is now running and will keep this process alive."); LOG.info("You can press CTRL+C to shutdown this server."); LOG.info("..."); server.serve(); } @Override public void close() { LOG.info(""); LOG.info("Shutting down Grakn Core Server..."); try { graknRPCService.close(); server.shutdown(); server.awaitTermination(); grakn.close(); System.runFinalization(); LOG.info("Grakn Core Server has been shutdown"); } catch (InterruptedException e) { LOG.error(FAILED_AT_STOPPING.message(), e); Thread.currentThread().interrupt(); } } private void start() throws IOException { try { server.start(); } catch (IOException e) { if (e.getCause() != null && e.getCause() instanceof BindException) { throw GraknException.of(ALREADY_RUNNING, port()); } else { throw e; } } } private void serve() { try { server.awaitTermination(); } catch (InterruptedException e) { // server is terminated close(); Thread.currentThread().interrupt(); } } }
import java.io.IOException;
import java.io.ByteArrayInputStream;
import java.lang.System;
import java.net.InetAddress;
import java.net.DatagramSocket;
import java.net.DatagramPacket;
import java.util.Scanner;
import java.util.Timer;
import java.util.TimerTask;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.DataLine;
import javax.sound.sampled.FloatControl;
import javax.sound.sampled.SourceDataLine;

/**
 * class WiMicServer
 *
 * Creates a WiMic server and provides an interface between
 * clients and the speakers
 */
public class WiMicServer implements Runnable {

    private final int PORT = 9876;
    private final String LOCALHOST = "0.0.0.0";

    private final String DISC_MESSAGE = "WIMIC_DISCOVER_REQ";
    private final String ACK_MESSAGE = "WIMIC_DISCOVER_ACK";
    private final String JOIN_MESSAGE = "WIMIC_JOIN_PASSWORD";
    private final String JOIN_SUCCESS = "WIMIC_JOIN_SUCCESS";
    private final String JOIN_FAIL = "WIMIC_JOIN_FAILURE";
    private final String SPEAK_MESSAGE = "WIMIC_SPEAK_REQ";
    private final String STOP_SPEAK_MESSAGE = "WIMIC_SPEAK_END";
    private final String SPEAK_ACK = "WIMIC_SPEAK_ACK";
    private final String SPEAK_NACK = "WIMIC_SPEAK_NACK";
    private final String SPEAK_TIMEOUT = "WIMIC_SPEAK_TIMEOUT";

    /**
     * Room name
     */
    private String name = "WiMic Server";

    /**
     * Room PIN
     */
    private int pin;

    /**
     * Channel availability flag
     */
    private boolean isChannelAvailable = true;

    /**
     * Time per request in ms
     */
    private int timeout = 60 * 1000;

    /**
     * Timer for voice requests
     */
    private Timer timer;

    /**
     * Length of PIN
     */
    private final int PIN_LENGTH = 4;

    /**
     * Variables used for transmitting voice
     */
    AudioInputStream audioInputStream;
    static AudioInputStream ais;
    static AudioFormat format;
    static boolean status = true;
    static int sampleRate = 16000;
    static DataLine.Info dataLineInfo;
    static SourceDataLine sourceDataLine;
    static int SPEAK_PORT = 9898;

    private DatagramSocket voiceSocket;

    /**
     * Constructor
     *
     * @param name Name of the room
     * @param pin PIN of the room
     */
    WiMicServer(String name, int pin) {
        // Ensure the first letter is capitalized
        this.name = name.substring(0, 1).toUpperCase() + name.substring(1);
        this.pin = pin;
    }

    /**
     * Creates the socket and listens for connections
     */
    public void run() {
        try {
            DatagramSocket socket = new DatagramSocket(PORT, InetAddress.getByName(LOCALHOST));
            socket.setBroadcast(true);

            System.out.println(name + " is ready. Your pin is: " + pin);

            receiveVoicePackets();

            while (true) {
                receiveOtherPackets(socket);
            }
        } catch (Exception e) {
            // TODO
            System.out.println(e);
        }
    }

    /**
     * Receive packets from clients (other than voice)
     *
     * @param socket DatagramSocket object which is bound to the port
     * @throws IOException if packets cannot be received
     */
    private void receiveOtherPackets(DatagramSocket socket) throws IOException {
        byte[] receiveBuffer = new byte[15000];
        DatagramPacket packet = new DatagramPacket(
            receiveBuffer, receiveBuffer.length
        );
        socket.receive(packet);
        handleReceivedPacket(socket, packet);
    }

    /**
     * Checks whether a received packet is a discovery, join, speak, or stop-speak packet
     *
     * @param socket DatagramSocket object
     * @param packet DatagramPacket object
     * @throws IOException thrown when a message can't be sent
     */
    private void handleReceivedPacket(
        DatagramSocket socket,
        DatagramPacket packet
    ) throws IOException {
        String message = new String(packet.getData()).trim();

        if (message.equals(DISC_MESSAGE)) {
            System.out.println("Discovery packet received from " + packet.getAddress());
            sendDiscoveryAck(socket, packet);
        } else if (message.contains(JOIN_MESSAGE)) {
            System.out.println("Join packet received from " + packet.getAddress());
            sendJoinACK(socket, packet);
        } else if (message.equals(SPEAK_MESSAGE)) {
            System.out.println("Speak message received");
            sendSpeakACK(socket, packet);
        } else if (message.equals(STOP_SPEAK_MESSAGE)) {
            System.out.println("Stop speak message received");
            isChannelAvailable = true;

            // Cancel the timer; it may still be null if no speak request was ever granted
            if (timer != null) {
                timer.cancel();
                timer.purge();
            }
        }
    }

    /**
     * Send ACK of discovery packet
     *
     * @param socket DatagramSocket object
     * @param packet DatagramPacket object
     * @throws IOException thrown when a message can't be sent
     */
    private void sendDiscoveryAck(DatagramSocket socket, DatagramPacket packet) throws IOException {
        byte[] sendData = (ACK_MESSAGE + ";" + this.name).getBytes();
        DatagramPacket sendPacket = new DatagramPacket(
            sendData, sendData.length, packet.getAddress(), packet.getPort()
        );
        socket.send(sendPacket);
        System.out.println("Sent discovery ACK");
    }

    /**
     * Send ACK of join packet
     *
     * @param socket DatagramSocket object
     * @param packet DatagramPacket object
     * @throws IOException thrown when a message can't be sent
     */
    private void sendJoinACK(DatagramSocket socket, DatagramPacket packet) throws IOException {
        String message = new String(packet.getData()).trim();
        String[] request = message.split(";");
        byte[] sendData;

        if (request.length >= 2 && validatePin(request[1])) {
            sendData = JOIN_SUCCESS.getBytes();
            System.out.println("PIN success!");
        } else {
            sendData = JOIN_FAIL.getBytes();
            System.out.println("Invalid PIN");
        }

        DatagramPacket sendPacket = new DatagramPacket(
            sendData, sendData.length, packet.getAddress(), packet.getPort()
        );
        socket.send(sendPacket);
    }

    /**
     * Check and send speak message acknowledgement
     */
    private void sendSpeakACK(DatagramSocket socket, DatagramPacket packet) throws IOException {
        if (isChannelAvailable) {
            isChannelAvailable = false;
            setTimeout(); // Inform client after timeout

            System.out.println("Sending ACK of channel available");
            sendMessage(socket, packet, SPEAK_ACK);
        } else {
            System.out.println("Sending NACK of channel available");
            sendMessage(socket, packet, SPEAK_NACK);
        }
    }

    /**
     * Send message to client
     */
    private void sendMessage(
        DatagramSocket socket,
        DatagramPacket packet,
        String message
    ) throws IOException {
        byte[] sendData = message.getBytes();
        DatagramPacket sendPacket = new DatagramPacket(
            sendData, sendData.length, packet.getAddress(), packet.getPort()
        );
        socket.send(sendPacket);
    }

    /**
     * Add timeout for each request
     */
    private void setTimeout() {
        timer = new Timer();
        timer.schedule(new TimerTask() {
            @Override
            public void run() {
                System.out.println("Timeout has occurred");
                isChannelAvailable = true; // Assuming client respects our requests
            }
        }, timeout);
    }

    /**
     * Checks if a pin is valid
     *
     * @param pin Given pin as String
     * @return true if valid, else false
     */
    private boolean validatePin(String pin) {
        String regex = "\\d+";
        int pinInt;

        if (pin.matches(regex) && pin.length() == PIN_LENGTH) {
            pinInt = Integer.parseInt(pin);
            if (this.pin == pinInt) {
                return true;
            }
        }
        return false;
    }

    /**
     * Listens for voice packets
     */
    private void receiveVoicePackets() {
        new Thread(new Runnable() {
            public void run() {
                try {
                    voiceSocket = new DatagramSocket(
                        SPEAK_PORT, InetAddress.getByName(LOCALHOST)
                    );
                    byte[] receiveData = new byte[3800];

                    initialize();

                    DatagramPacket receivePacket = new DatagramPacket(receiveData, receiveData.length);
                    ByteArrayInputStream baiss = new ByteArrayInputStream(receivePacket.getData());

                    while (status) {
                        voiceSocket.receive(receivePacket);
                        if (isChannelAvailable) {
                            sendMessage(voiceSocket, receivePacket, SPEAK_TIMEOUT);
                        }
                        ais = new AudioInputStream(baiss, format, receivePacket.getLength());
                        toSpeaker(receivePacket.getData());
                    }

                    sourceDataLine.drain();
                    sourceDataLine.close();
                } catch (Exception e) {
                    System.out.println("Exception!");
                    e.printStackTrace();
                }
            }
        }).start();
    }

    /**
     * Initializes audio
     */
    public static void initialize() throws Exception {
        format = new AudioFormat(sampleRate, 16, 1, true, false);
        dataLineInfo = new DataLine.Info(SourceDataLine.class, format);
        sourceDataLine = (SourceDataLine) AudioSystem.getLine(dataLineInfo);
        sourceDataLine.open(format);
        sourceDataLine.start();

        FloatControl volumeControl =
            (FloatControl) sourceDataLine.getControl(FloatControl.Type.MASTER_GAIN);
        volumeControl.setValue(1.00f);
    }

    /**
     * Transmits data to speakers
     *
     * @param soundBytes bytes to write to speakers
     */
    public static void toSpeaker(byte[] soundBytes) {
        try {
            sourceDataLine.write(soundBytes, 0, soundBytes.length);
        } catch (Exception e) {
            System.out.println("Cannot send data to speakers");
            e.printStackTrace();
        }
    }

    /**
     * Creates new server instance
     *
     * @param args Command line args
     */
    public static void main(String[] args) {
        Scanner sc;
        sc = new Scanner(System.in);
        String name = new String();
        boolean validName = false;

        while (!validName) {
            System.out.println("Enter room name:");
            name = sc.nextLine();
            name = name.trim();

            if (name.length() > 0 && name.length() <= 12) {
                validName = true;
            } else {
                System.out.println("Name must be between 1 and 12 chars");
            }
        }

        // Generate a random pin
        double rand = Math.random() * 8999 + 1000;

        Thread serverThread = new Thread(new WiMicServer(name, (int) rand));
        serverThread.start();
    }
}
package ecologylab.bigsemantics.dpool;

import java.io.File;
import java.util.HashMap;
import java.util.Map;

import org.apache.commons.configuration.Configuration;
import org.apache.commons.configuration.ConfigurationException;
import org.eclipse.jetty.server.Handler;
import org.eclipse.jetty.servlet.ServletContextHandler;
import org.eclipse.jetty.servlet.ServletHolder;
import org.glassfish.hk2.utilities.binding.AbstractBinder;
import org.glassfish.jersey.server.ResourceConfig;
import org.glassfish.jersey.servlet.ServletContainer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import ecologylab.bigsemantics.Configs;
import ecologylab.bigsemantics.Configurable;
import ecologylab.bigsemantics.Utils;
import ecologylab.bigsemantics.dpool.resources.Echo;
import ecologylab.bigsemantics.dpool.resources.LogService;
import ecologylab.bigsemantics.dpool.resources.PageService;
import ecologylab.bigsemantics.dpool.resources.TaskService;
import ecologylab.bigsemantics.service.AbstractServiceApplication;
import ecologylab.bigsemantics.service.ServiceParams;
import ecologylab.serialization.formatenums.Format;

/**
 * Glues different components of the service together.
 *
 * @author quyin
 */
public class DownloaderPoolApplication extends AbstractServiceApplication
    implements Configurable, DpoolConfigNames
{

  static Logger logger;

  static
  {
    logger = LoggerFactory.getLogger(DownloaderPoolApplication.class);
  }

  private Configuration            configs;

  private Controller               controller;

  private RemoteCurlDownloaderList downloaders;

  public Controller getController()
  {
    return controller;
  }

  @Override
  public void configure(Configuration configuration) throws Exception
  {
    this.configs = configuration;

    // set up controller
    controller = new Controller();
    controller.configure(configs);

    // set up downloader(s)
    String downloadersFilePath = configuration.getString(DOWNLOADERS_FILE, null);
    File file = downloadersFilePath == null ? null : new File(downloadersFilePath);
    if (file == null || !file.exists() || !file.isFile())
    {
      LocalDownloader localDownloader = new LocalDownloader("local-downloader", 4);
      controller.getDispatcher().addWorker(localDownloader);
    }
    else
    {
      downloaders = (RemoteCurlDownloaderList) MessageScope.get().deserialize(file, Format.JSON);
      Thread downloaderInitThread = new Thread(new Runnable()
      {
        @Override
        public void run()
        {
          for (RemoteCurlDownloader downloader : downloaders.getDownloaders())
          {
            downloader.copyFrom(downloaders.getDefaultConfig());
            try
            {
              downloader.initialize();
              controller.getDispatcher().addWorker(downloader);
              logger.info("Successfully added {} as a downloader", downloader);
            }
            catch (Exception e)
            {
              logger.error("Failed to initialize " + downloader, e);
            }
          }
        }
      }, "downloader-init-thread");
      downloaderInitThread.start();
    }
  }

  @Override
  public Configuration getConfiguration()
  {
    return configs;
  }

  @Override
  public Handler createHandler() throws ConfigurationException
  {
    // set up jersey servlet
    ResourceConfig config = new ResourceConfig();
    // we package everything into a runnable jar using OneJAR, which provides its own class
    // loader. as a result, Jersey classpath scanning won't work properly for now. hopefully this
    // can be fixed soon. right now we need to specify classes.
    config.register(Echo.class);
    config.register(LogService.class);
    config.register(PageService.class);
    config.register(TaskService.class);
    // binder for HK2 to inject the controller to Jersey resource instances
    config.register(new AbstractBinder()
    {
      @Override
      protected void configure()
      {
        bind(controller).to(Controller.class);
      }
    });

    ServletContainer container = new ServletContainer(config);
    ServletContextHandler handler = new ServletContextHandler();
    handler.setContextPath("/DownloaderPool");
    // NOTE: the original source is truncated at this point; the lines below are an assumed
    // minimal completion that mounts the Jersey servlet and returns the handler.
    handler.addServlet(new ServletHolder(container), "/*");
    return handler;
  }

}
package com.jivesoftware.os.amza.client.http; import com.fasterxml.jackson.databind.ObjectMapper; import com.jivesoftware.os.amza.api.filer.FilerInputStream; import com.jivesoftware.os.amza.api.filer.UIO; import com.jivesoftware.os.amza.api.partition.PartitionName; import com.jivesoftware.os.amza.api.partition.PartitionProperties; import com.jivesoftware.os.amza.api.ring.RingHost; import com.jivesoftware.os.amza.api.ring.RingMember; import com.jivesoftware.os.amza.api.ring.RingMemberAndHost; import com.jivesoftware.os.mlogger.core.MetricLogger; import com.jivesoftware.os.mlogger.core.MetricLoggerFactory; import com.jivesoftware.os.routing.bird.http.client.ConnectionDescriptorSelectiveStrategy; import com.jivesoftware.os.routing.bird.http.client.HttpClientException; import com.jivesoftware.os.routing.bird.http.client.HttpResponse; import com.jivesoftware.os.routing.bird.http.client.HttpStreamResponse; import com.jivesoftware.os.routing.bird.http.client.RoundRobinStrategy; import com.jivesoftware.os.routing.bird.http.client.TenantAwareHttpClient; import com.jivesoftware.os.routing.bird.shared.ClientCall; import com.jivesoftware.os.routing.bird.shared.HostPort; import com.jivesoftware.os.routing.bird.shared.NextClientStrategy; import java.io.IOException; import java.util.Optional; /** * @author jonathan.colt */ public class HttpPartitionHostsProvider implements PartitionHostsProvider { private static final MetricLogger LOG = MetricLoggerFactory.getLogger(); private final TenantAwareHttpClient<String> tenantAwareHttpClient; private final ObjectMapper mapper; private final RoundRobinStrategy roundRobinStrategy = new RoundRobinStrategy(); public HttpPartitionHostsProvider(TenantAwareHttpClient<String> tenantAwareHttpClient, ObjectMapper mapper) { this.tenantAwareHttpClient = tenantAwareHttpClient; this.mapper = mapper; } @Override public void ensurePartition(PartitionName partitionName, int desiredRingSize, PartitionProperties partitionProperties) throws Exception { String base64PartitionName = partitionName.toBase64(); String partitionPropertiesString = mapper.writeValueAsString(partitionProperties); byte[] intBuffer = new byte[4]; Ring partitionsRing = tenantAwareHttpClient.call("", roundRobinStrategy, "configPartition", (client) -> { HttpStreamResponse got = client.streamingPost("/amza/v1/configPartition/" + base64PartitionName + "/" + desiredRingSize, partitionPropertiesString, null); try { if (got.getStatusCode() >= 200 && got.getStatusCode() < 300) { try { FilerInputStream fis = new FilerInputStream(got.getInputStream()); int ringSize = UIO.readInt(fis, "ringSize", intBuffer); int leaderIndex = -1; RingMemberAndHost[] ring = new RingMemberAndHost[ringSize]; for (int i = 0; i < ringSize; i++) { RingMember ringMember = RingMember.fromBytes(UIO.readByteArray(fis, "ringMember", intBuffer)); RingHost ringHost = RingHost.fromBytes(UIO.readByteArray(fis, "ringHost", intBuffer)); ring[i] = new RingMemberAndHost(ringMember, ringHost); if (UIO.readBoolean(fis, "leader")) { if (leaderIndex == -1) { leaderIndex = i; } else { throw new RuntimeException("We suck! 
Gave back more than one leader!"); } } } return new ClientCall.ClientResponse<>(new Ring(leaderIndex, ring), true); } catch (Exception x) { throw new RuntimeException("Failed loading routes for " + partitionName, x); } } } finally { got.close(); } throw new RuntimeException("Failed to config partition:" + partitionName); }); HostPort[] orderHostPorts = new HostPort[partitionsRing.members.length]; for (int i = 0; i < orderHostPorts.length; i++) { RingHost ringHost = partitionsRing.members[i].ringHost; orderHostPorts[i] = new HostPort(ringHost.getHost(), ringHost.getPort()); } NextClientStrategy strategy = new ConnectionDescriptorSelectiveStrategy(orderHostPorts); tenantAwareHttpClient.call("", strategy, "ensurePartition", (client) -> { HttpResponse got = client.postJson("/amza/v1/ensurePartition/" + base64PartitionName + "/" + 30_000, // TODO config partitionPropertiesString, null); if (got.getStatusCode() >= 200 && got.getStatusCode() < 300) { return new ClientCall.ClientResponse<>(null, true); } throw new RuntimeException("Failed to ensure partition:" + partitionName); }); } @Override public Ring getPartitionHosts(PartitionName partitionName, Optional<RingMemberAndHost> useHost, long waitForLeaderElection) throws HttpClientException { NextClientStrategy strategy = useHost.map( (RingMemberAndHost value) -> (NextClientStrategy) new ConnectionDescriptorSelectiveStrategy(new HostPort[]{ new HostPort(value.ringHost.getHost(), value.ringHost.getPort()) })).orElse(roundRobinStrategy); byte[] intBuffer = new byte[4]; Ring leaderlessRing = tenantAwareHttpClient.call("", strategy, "getPartitionHosts", (client) -> { HttpStreamResponse got = client.streamingPost("/amza/v1/ring/" + partitionName.toBase64(), "", null); Ring ring = consumeRing(partitionName, got, intBuffer); return new ClientCall.ClientResponse<>(ring, true); }); if (waitForLeaderElection > 0) { RingMemberAndHost[] actualRing = leaderlessRing.actualRing(); HostPort[] chooseFrom = new HostPort[actualRing.length]; for (int i = 0; i < chooseFrom.length; i++) { RingHost ringHost = actualRing[i].ringHost; chooseFrom[i] = new HostPort(ringHost.getHost(), ringHost.getPort()); } strategy = new ConnectionDescriptorSelectiveStrategy(chooseFrom); return tenantAwareHttpClient.call("", strategy, "ringLeader", (client) -> { HttpStreamResponse got = client.streamingPost("/amza/v1/ringLeader/" + partitionName.toBase64() + "/" + waitForLeaderElection, "", null); Ring ring = consumeRing(partitionName, got, intBuffer); return new ClientCall.ClientResponse<>(ring, true); }); } else { return leaderlessRing; } } private Ring consumeRing(PartitionName partitionName, HttpStreamResponse got, byte[] intBuffer) { try { if (got.getStatusCode() >= 200 && got.getStatusCode() < 300) { FilerInputStream fis = null; try { fis = new FilerInputStream(got.getInputStream()); int ringSize = UIO.readInt(fis, "ringSize", intBuffer); int leaderIndex = -1; RingMemberAndHost[] ring = new RingMemberAndHost[ringSize]; for (int i = 0; i < ringSize; i++) { RingMember ringMember = RingMember.fromBytes(UIO.readByteArray(fis, "ringMember", intBuffer)); RingHost ringHost = RingHost.fromBytes(UIO.readByteArray(fis, "ringHost", intBuffer)); ring[i] = new RingMemberAndHost(ringMember, ringHost); if (UIO.readBoolean(fis, "leader")) { if (leaderIndex == -1) { leaderIndex = i; } else { throw new RuntimeException("We suck! 
Gave back more than one leader!"); } } } return new Ring(leaderIndex, ring); } catch (Exception x) { throw new RuntimeException("Failed loading routes for " + partitionName, x); } finally { if (fis != null) { try { fis.close(); } catch (IOException e) { LOG.warn("Failed to close input stream", e); } } } } } finally { got.close(); } throw new RuntimeException("No routes to partition:" + partitionName); } }
package pl.temomuko.autostoprace.ui.teamslocationsmap; import android.content.Intent; import android.net.Uri; import android.os.Bundle; import android.os.Parcelable; import android.support.annotation.NonNull; import android.support.v7.widget.CardView; import android.support.v7.widget.RecyclerView; import android.util.Log; import android.view.Menu; import android.view.MenuItem; import android.view.View; import android.widget.Toast; import com.google.android.gms.maps.CameraUpdateFactory; import com.google.android.gms.maps.GoogleMap; import com.google.android.gms.maps.SupportMapFragment; import com.google.maps.android.clustering.ClusterManager; import org.greenrobot.eventbus.EventBus; import org.greenrobot.eventbus.Subscribe; import org.greenrobot.eventbus.ThreadMode; import java.util.ArrayList; import java.util.List; import javax.inject.Inject; import butterknife.Bind; import me.zhanghai.android.materialprogressbar.MaterialProgressBar; import pl.temomuko.autostoprace.Constants; import pl.temomuko.autostoprace.R; import pl.temomuko.autostoprace.data.Event; import pl.temomuko.autostoprace.data.model.LocationRecord; import pl.temomuko.autostoprace.data.model.Team; import pl.temomuko.autostoprace.ui.base.drawer.DrawerActivity; import pl.temomuko.autostoprace.ui.main.MainActivity; import pl.temomuko.autostoprace.ui.teamslocationsmap.adapter.map.LocationRecordClusterItem; import pl.temomuko.autostoprace.ui.teamslocationsmap.adapter.map.LocationRecordClusterRenderer; import pl.temomuko.autostoprace.ui.teamslocationsmap.adapter.map.TeamLocationInfoWindowAdapter; import pl.temomuko.autostoprace.ui.teamslocationsmap.adapter.searchteamview.SearchTeamView; import pl.temomuko.autostoprace.util.IntentUtil; import pl.temomuko.autostoprace.util.LogUtil; import pl.temomuko.autostoprace.util.rx.RxCacheHelper; import pl.temomuko.autostoprace.util.rx.RxUtil; import rx.Observable; import rx.Subscription; public class TeamsLocationsMapActivity extends DrawerActivity implements TeamsLocationsMapMvpView { private static final String TAG = TeamsLocationsMapActivity.class.getSimpleName(); private static final String BUNDLE_CURRENT_TEAM_LOCATIONS = "bundle_current_team_locations"; private static final String BUNDLE_SEARCH_TEAM_VIEW = "bundle_team_list_hints"; private static final float DEFAULT_MAP_ZOOM = 5.5f; private final static String RX_CACHE_ALL_TEAMS_TAG = "rx_cache_all_teams_tag"; public static final String RX_CACHE_TEAM_LOCATIONS_TAG = "rx_cache_team_locations_tag"; @Inject TeamsLocationsMapPresenter mTeamsLocationsMapPresenter; @Inject TeamLocationInfoWindowAdapter mTeamsLocationInfoWindowAdapter; @Bind(R.id.horizontal_progress_bar) MaterialProgressBar mMaterialProgressBar; @Bind(R.id.search_team_view) SearchTeamView mSearchTeamView; @Bind(R.id.rv_team_hints) RecyclerView mTeamHintsRecyclerView; @Bind(R.id.card_team_hints) CardView mTeamHintsLinearLayout; private boolean mAllTeamsProgressState = false; private boolean mTeamProgressState = false; private boolean mAnimateTeamLocationsUpdate = true; private GoogleMap mMap; private Subscription mSetHintsSubscription; private Subscription mSetLocationsSubscription; private ClusterManager<LocationRecordClusterItem> mClusterManager; private List<LocationRecord> mCurrentTeamLocations; @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_teams_location); getActivityComponent().inject(this); setupPresenter(); setupSearchTeamView(); setupIntent(getIntent()); setupMapFragment(); } @Override 
protected void onStart() { super.onStart(); EventBus.getDefault().register(this); } @Override protected void onResume() { super.onResume(); mTeamsLocationsMapPresenter.loadAllTeams(); } @Override protected void onStop() { super.onStop(); EventBus.getDefault().unregister(this); } @Override protected void onDestroy() { mTeamsLocationsMapPresenter.detachView(); if (mSetHintsSubscription != null) mSetHintsSubscription.unsubscribe(); if (mSetLocationsSubscription != null) mSetLocationsSubscription.unsubscribe(); super.onDestroy(); } @Override protected void onSaveInstanceState(Bundle outState) { if (mCurrentTeamLocations != null) { outState.putParcelableArray(BUNDLE_CURRENT_TEAM_LOCATIONS, mCurrentTeamLocations.toArray(new LocationRecord[mCurrentTeamLocations.size()])); } outState.putBundle(BUNDLE_SEARCH_TEAM_VIEW, mSearchTeamView.saveHintsState()); super.onSaveInstanceState(outState); } @Override protected void onRestoreInstanceState(Bundle savedInstanceState) { super.onRestoreInstanceState(savedInstanceState); mSearchTeamView.restoreHintState(savedInstanceState.getBundle(BUNDLE_SEARCH_TEAM_VIEW)); Parcelable[] parcelableCurrentTeamLocations = savedInstanceState.getParcelableArray(BUNDLE_CURRENT_TEAM_LOCATIONS); if (parcelableCurrentTeamLocations != null) { mCurrentTeamLocations = new ArrayList<>(parcelableCurrentTeamLocations.length); for (Parcelable parcelable : parcelableCurrentTeamLocations) { mCurrentTeamLocations.add((LocationRecord) parcelable); } } } @Override public boolean onCreateOptionsMenu(Menu menu) { getMenuInflater().inflate(R.menu.menu_teams_locations_map, menu); return super.onCreateOptionsMenu(menu); } @Override public boolean onOptionsItemSelected(MenuItem item) { switch (item.getItemId()) { case R.id.action_search: handleActionSearch(); return true; case R.id.action_share_map: shareMap(); return true; } return super.onOptionsItemSelected(item); } @Override public void onBackPressed() { if (mSearchTeamView.hasFocus()) { mSearchTeamView.clearFocus(); } else { super.onBackPressed(); } } @Override protected void onNewIntent(Intent intent) { super.onNewIntent(intent); setupIntent(intent); } private void setupPresenter() { mTeamsLocationsMapPresenter.setupRxCacheHelper(this, RxCacheHelper.get(RX_CACHE_ALL_TEAMS_TAG), RxCacheHelper.get(RX_CACHE_TEAM_LOCATIONS_TAG)); mTeamsLocationsMapPresenter.attachView(this); mTeamsLocationsMapPresenter.setupUserInfoInDrawer(); } private void setupSearchTeamView() { mSearchTeamView.setHintsRecyclerView(mTeamHintsRecyclerView); mSearchTeamView.setOptionalHintsView(mTeamHintsLinearLayout); mSearchTeamView.setOnTeamRequestedListener(new SearchTeamView.OnTeamRequestedListener() { @Override public void onTeamRequest(int teamId) { mTeamsLocationsMapPresenter.loadTeam(teamId); } @Override public void onTeamRequest(String teamString) { mTeamsLocationsMapPresenter.loadTeam(teamString); } }); } private void setupIntent(Intent intent) { Uri data = intent.getData(); Bundle extras = intent.getExtras(); if (data != null) { String teamNumberParameterValue = data.getQueryParameter(Constants.URL_MAP_TEAM_NUMBER_PARAM); if (teamNumberParameterValue != null) { changeTeamFromIntent(teamNumberParameterValue); } } else if (extras != null) { changeTeam(extras.getInt(MainActivity.EXTRA_TEAM_NUMBER)); } } private void changeTeamFromIntent(@NonNull String teamNumberParameterValue) { try { int teamNumber = Integer.parseInt(teamNumberParameterValue.replaceAll("[\\D]", "")); changeTeam(teamNumber); } catch (NumberFormatException e) { Log.e(TAG, "Invalid url query param."); 
} } private void changeTeam(int teamNumber) { mSearchTeamView.setText(String.valueOf(teamNumber)); mTeamsLocationsMapPresenter.loadTeam(teamNumber); } private void setupMapFragment() { SupportMapFragment mapFragment = (SupportMapFragment) getSupportFragmentManager().findFragmentById(R.id.map_fragment); mapFragment.getMapAsync(googleMap -> { mMap = googleMap; mSearchTeamView.setEnabled(true); setupClusterManager(); if (mCurrentTeamLocations != null) { mAnimateTeamLocationsUpdate = false; setLocations(mCurrentTeamLocations); } }); } private void setupClusterManager() { mClusterManager = new ClusterManager<>(getApplicationContext(), mMap); mMap.setOnCameraChangeListener(mClusterManager); mMap.setInfoWindowAdapter(mClusterManager.getMarkerManager()); mClusterManager.setRenderer(new LocationRecordClusterRenderer(getApplicationContext(), mMap, mClusterManager)); setCustomClusterWindowAdapter(); } private void setCustomClusterWindowAdapter() { mClusterManager.getMarkerCollection().setOnInfoWindowAdapter(mTeamsLocationInfoWindowAdapter); mClusterManager.getClusterMarkerCollection().setOnInfoWindowAdapter(mTeamsLocationInfoWindowAdapter); } private void handleActionSearch() { if (mSearchTeamView.hasFocus()) { mSearchTeamView.requestSearch(); } else { mSearchTeamView.openSearch(); } } private void shareMap() { IntentUtil.shareLocationsMap(this, mSearchTeamView.getText().toString()); } /* MVP View methods */ @Override public void setAllTeamsProgress(boolean allTeamsProgressState) { mAllTeamsProgressState = allTeamsProgressState; mMaterialProgressBar.setVisibility( allTeamsProgressState || mTeamProgressState ? View.VISIBLE : View.INVISIBLE); } @Override public void setTeamProgress(boolean teamProgressState) { mTeamProgressState = teamProgressState; mMaterialProgressBar.setVisibility( teamProgressState || mAllTeamsProgressState ? 
View.VISIBLE : View.INVISIBLE); } @Override public void setHints(List<Team> teams) { if (mSetHintsSubscription != null) mSetHintsSubscription.unsubscribe(); mSetHintsSubscription = Observable.from(teams) .toSortedList() .compose(RxUtil.applyComputationSchedulers()) .subscribe(mSearchTeamView::setHints); } @Override public void clearCurrentTeamLocations() { if (mClusterManager != null) { mClusterManager.clearItems(); mClusterManager.cluster(); } if (mCurrentTeamLocations != null) { mCurrentTeamLocations.clear(); } } @Override public void setLocations(@NonNull List<LocationRecord> locationRecords) { mCurrentTeamLocations = locationRecords; if (mSetLocationsSubscription != null) mSetLocationsSubscription.unsubscribe(); mSetLocationsSubscription = Observable.from(locationRecords) .map(LocationRecordClusterItem::new) .toSortedList() .compose(RxUtil.applyComputationSchedulers()) .subscribe(this::handleTeamLocationsToSet); } @Override public void showError(String message) { Toast.makeText(getApplicationContext(), message, Toast.LENGTH_SHORT).show(); } @Override public void showInvalidFormatError() { Toast.makeText(this, getString(R.string.error_invalid_team_id), Toast.LENGTH_SHORT).show(); } @Override public void showNoLocationRecordsInfo() { Toast.makeText(this, R.string.msg_no_location_records_to_display, Toast.LENGTH_SHORT).show(); } /* Private helper methods */ private void handleTeamLocationsToSet(List<LocationRecordClusterItem> locationRecordClusterItems) { if (mClusterManager != null) { mClusterManager.addItems(locationRecordClusterItems); mClusterManager.cluster(); if (mAnimateTeamLocationsUpdate) { if (!locationRecordClusterItems.isEmpty()) { mMap.animateCamera(CameraUpdateFactory .newLatLngZoom(locationRecordClusterItems.get(0).getPosition(), DEFAULT_MAP_ZOOM)); } } else { mAnimateTeamLocationsUpdate = true; } } } /* Events */ @SuppressWarnings("unused") @Subscribe(threadMode = ThreadMode.MAIN) public void onNetworkConnected(Event.NetworkConnected event) { LogUtil.i(TAG, "received network connected event"); mTeamsLocationsMapPresenter.loadAllTeams(); } }
// -*- mode: java; c-basic-offset: 2; -*- package com.google.appinventor.client.editor.simple.components; import static com.google.appinventor.client.Ode.MESSAGES; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import com.google.appinventor.client.editor.simple.SimpleEditor; import com.google.appinventor.client.editor.simple.components.utils.PropertiesUtil; import com.google.appinventor.client.editor.youngandroid.YaFormEditor; import com.google.appinventor.client.editor.youngandroid.properties.YoungAndroidLengthPropertyEditor; import com.google.appinventor.client.editor.youngandroid.properties.YoungAndroidVerticalAlignmentChoicePropertyEditor; import com.google.appinventor.client.output.OdeLog; import com.google.appinventor.client.properties.BadPropertyEditorException; import com.google.appinventor.client.widgets.properties.EditableProperties; import com.google.appinventor.components.common.ComponentConstants; import com.google.appinventor.shared.settings.SettingsConstants; import com.google.gwt.dom.client.DivElement; import com.google.gwt.dom.client.Document; import com.google.gwt.dom.client.Element; import com.google.gwt.dom.client.Style; import com.google.gwt.user.client.Timer; import com.google.gwt.user.client.ui.AbsolutePanel; import com.google.gwt.user.client.ui.Button; import com.google.gwt.user.client.ui.Composite; import com.google.gwt.user.client.ui.DockPanel; import com.google.gwt.user.client.ui.HorizontalPanel; import com.google.gwt.user.client.ui.Image; import com.google.gwt.user.client.ui.Label; import com.google.gwt.user.client.ui.ScrollPanel; import com.google.gwt.user.client.ui.TreeItem; public final class MockForm extends MockContainer { /* * Widget for the mock form title bar. */ private class TitleBar extends Composite { private static final int TITLEBAR_HEIGHT = 24; private static final int ACTIONBAR_HEIGHT = 56; // UI elements private Label title; private Button menuButton; private AbsolutePanel bar; private boolean actionBar; private String backgroundColor; public String getTitle() { return title.getText(); } /* * Creates a new title bar. */ TitleBar() { title = new Label(); title.setStylePrimaryName("ode-SimpleMockFormTitle"); title.setHorizontalAlignment(Label.ALIGN_LEFT); menuButton = new Button(); menuButton.setText("\u22ee"); menuButton.setStylePrimaryName("ode-SimpleMockFormMenuButton"); bar = new AbsolutePanel(); bar.add(title); bar.add(menuButton); initWidget(bar); setStylePrimaryName("ode-SimpleMockFormTitleBar"); setSize("100%", TITLEBAR_HEIGHT + "px"); } /* * Changes the title in the title bar. */ void changeTitle(String newTitle) { title.setText(newTitle); } void setActionBar(boolean actionBar) { this.actionBar = actionBar; setSize("100%", (actionBar ? ACTIONBAR_HEIGHT : TITLEBAR_HEIGHT) + "px"); if (actionBar) { addStyleDependentName("ActionBar"); MockComponentsUtil.setWidgetBackgroundColor(titleBar.bar, backgroundColor); } else { removeStyleDependentName("ActionBar"); MockComponentsUtil.setWidgetBackgroundColor(titleBar.bar, "&HFF696969"); } } void setBackgroundColor(String color) { this.backgroundColor = color; if (actionBar) { MockComponentsUtil.setWidgetBackgroundColor(titleBar.bar, color); } } int getHeight() { return actionBar ? ACTIONBAR_HEIGHT : TITLEBAR_HEIGHT; } } /* * Widget for a mock phone status bar. */ private class PhoneBar extends Composite { private static final int HEIGHT = 24; // UI elements private DockPanel bar; private Image phoneBarImage; /* * Creates a new phone status bar. 
*/ PhoneBar() { phoneBarImage = new Image(images.phonebar()); bar = new DockPanel(); bar.setHorizontalAlignment(HorizontalPanel.ALIGN_RIGHT); bar.add(phoneBarImage, DockPanel.EAST); initWidget(bar); setStylePrimaryName("ode-SimpleMockFormPhoneBar"); setSize("100%", HEIGHT + "px"); } } /* * * Widget for a mock phone navigation bar; Shows at the bottom of the viewer */ private class NavigationBar extends Composite { private static final int HEIGHT = 44; // UI elements private AbsolutePanel bar; /* * Creates a new phone navigation bar; Shows at the bottom of the viewer. */ NavigationBar() { bar = new AbsolutePanel(); initWidget(bar); setStylePrimaryName("ode-SimpleMockFormNavigationBarPortrait"); } public int getHeight() { return HEIGHT; } } /** * Component type name. */ public static final String TYPE = "Form"; private static final String VISIBLE_TYPE = "Screen"; // Currently App Inventor provides two main sizes that correspond to 'normal' and 'large' // screens. We use phone=normal (470 x 320 DP) and tablet=large (640 x 480 DP). // More information about 'bucket' sizes at: // The values for Phone and Tablet were decided by trial and error. The main reason is that in // the designer we use sizes of GWT widgets, and not the sizes of the actual Android widgets. private static final int PHONE_PORTRAIT_WIDTH = 320; private static final int PHONE_PORTRAIT_HEIGHT = 470 + 35; // Adds 35 for the navigation bar private static final int PHONE_LANDSCAPE_WIDTH = PHONE_PORTRAIT_HEIGHT; private static final int PHONE_LANDSCAPE_HEIGHT = PHONE_PORTRAIT_WIDTH; private static final int TABLET_PORTRAIT_WIDTH = 480; private static final int TABLET_PORTRAIT_HEIGHT = 640 + 35; // Adds 35 for the navigation bar private static final int TABLET_LANDSCAPE_WIDTH = TABLET_PORTRAIT_HEIGHT; private static final int TABLET_LANDSCAPE_HEIGHT = TABLET_PORTRAIT_WIDTH; // These are default values but they can be changed in the changePreviewSize method private int PORTRAIT_WIDTH = PHONE_PORTRAIT_WIDTH; private int PORTRAIT_HEIGHT = PHONE_PORTRAIT_HEIGHT; private int LANDSCAPE_WIDTH = PHONE_LANDSCAPE_WIDTH; private int LANDSCAPE_HEIGHT = PHONE_LANDSCAPE_HEIGHT; private boolean landscape = false; private int idxPhoneSize = 0; // Property names private static final String PROPERTY_NAME_TITLE = "Title"; private static final String PROPERTY_NAME_SCREEN_ORIENTATION = "ScreenOrientation"; private static final String PROPERTY_NAME_SCROLLABLE = "Scrollable"; private static final String PROPERTY_NAME_ICON = "Icon"; private static final String PROPERTY_NAME_VCODE = "VersionCode"; private static final String PROPERTY_NAME_VNAME = "VersionName"; private static final String PROPERTY_NAME_ANAME = "AppName"; private static final String PROPERTY_NAME_SIZING = "Sizing"; // Don't show except on screen1 private static final String PROPERTY_NAME_TITLEVISIBLE = "TitleVisible"; // Don't show except on screen1 private static final String PROPERTY_NAME_SHOW_LISTS_AS_JSON = "ShowListsAsJson"; private static final String PROPERTY_NAME_TUTORIAL_URL = "TutorialURL"; private static final String PROPERTY_NAME_BLOCK_SUBSET = "BlocksToolkit"; private static final String PROPERTY_NAME_ACTIONBAR = "ActionBar"; private static final String PROPERTY_NAME_PRIMARY_COLOR = "PrimaryColor"; private static final String PROPERTY_NAME_PRIMARY_COLOR_DARK = "PrimaryColorDark"; private static final String PROPERTY_NAME_ACCENT_COLOR = "AccentColor"; private static final String PROPERTY_NAME_THEME = "Theme"; // Form UI components AbsolutePanel formWidget; AbsolutePanel 
phoneWidget; AbsolutePanel responsivePanel; ScrollPanel scrollPanel; private TitleBar titleBar; private NavigationBar navigationBar; private MockComponent selectedComponent; int screenWidth; // TEMP: Make package visible so we can use it MockHVLayoutBase private int screenHeight; int usableScreenHeight; // TEMP: Make package visible so we can use it MockHVLayoutBase int usableScreenWidth; // Set of listeners for any changes of the form final HashSet<FormChangeListener> formChangeListeners = new HashSet<FormChangeListener>(); // Don't access the verticalScrollbarWidth field directly. Use getVerticalScrollbarWidth(). private static int verticalScrollbarWidth; private MockFormLayout myLayout; // flag to control attempting to enable/disable vertical // alignment when scrollable property is changed private boolean initialized = false; private YoungAndroidVerticalAlignmentChoicePropertyEditor myVAlignmentPropertyEditor; public static final String PROPERTY_NAME_HORIZONTAL_ALIGNMENT = "AlignHorizontal"; public static final String PROPERTY_NAME_VERTICAL_ALIGNMENT = "AlignVertical"; /** * Creates a new MockForm component. * * @param editor editor of source file the component belongs to */ public MockForm(SimpleEditor editor) { // Note(Hal): This helper thing is a kludge because I really want to write: // myLayout = new MockHVLayout(orientation); // super(editor, type, icon, myLayout); // but Java won't let me do that. super(editor, TYPE, images.form(), MockFormHelper.makeLayout()); // Note(hal): There better not be any calls to MockFormHelper before the // next instruction. Note that the Helper methods are synchronized to avoid possible // future problems if we ever have threads creating forms in parallel. myLayout = MockFormHelper.getLayout(); phoneWidget = new AbsolutePanel(); phoneWidget.setStylePrimaryName("ode-SimpleMockFormPhonePortrait"); formWidget = new AbsolutePanel(); formWidget.setStylePrimaryName("ode-SimpleMockForm"); responsivePanel = new AbsolutePanel(); // Initialize mock form UI by adding the phone bar and title bar. responsivePanel.add(new PhoneBar()); titleBar = new TitleBar(); responsivePanel.add(titleBar); // Put a ScrollPanel around the rootPanel. scrollPanel = new ScrollPanel(rootPanel); responsivePanel.add(scrollPanel); formWidget.add(responsivePanel); //Add navigation bar at the bottom of the viewer. navigationBar = new NavigationBar(); formWidget.add(navigationBar); phoneWidget.add(formWidget); initComponent(phoneWidget); // Set up the initial state of the vertical alignment property editor and its dropdowns try { myVAlignmentPropertyEditor = PropertiesUtil.getVAlignmentEditor(properties); } catch (BadPropertyEditorException e) { OdeLog.log(MESSAGES.badAlignmentPropertyEditorForArrangement()); return; } enableAndDisableDropdowns(); initialized = true; // Now that the default for Scrollable is false, we need to force setting the property when creating the MockForm setScrollableProperty(getPropertyValue(PROPERTY_NAME_SCROLLABLE)); } public void changePreviewSize(int width, int height, int idx) { // It will definitely be modified in the future to add more options. 
PORTRAIT_WIDTH = width; PORTRAIT_HEIGHT = height; LANDSCAPE_WIDTH = height; LANDSCAPE_HEIGHT = width; idxPhoneSize = idx; setPhoneStyle(); if (landscape) { resizePanel(LANDSCAPE_WIDTH, LANDSCAPE_HEIGHT); } else { resizePanel(width, height); } } private void setPhoneStyle() { if (landscape) { if (idxPhoneSize == 0) phoneWidget.setStylePrimaryName("ode-SimpleMockFormPhoneLandscape"); else if (idxPhoneSize == 1) phoneWidget.setStylePrimaryName("ode-SimpleMockFormPhoneLandscapeTablet"); else if (idxPhoneSize == 2) phoneWidget.setStylePrimaryName("ode-SimpleMockFormPhoneLandscapeMonitor"); navigationBar.setStylePrimaryName("ode-SimpleMockFormNavigationBarLandscape"); } else { if (idxPhoneSize == 0) phoneWidget.setStylePrimaryName("ode-SimpleMockFormPhonePortrait"); else if (idxPhoneSize == 1) phoneWidget.setStylePrimaryName("ode-SimpleMockFormPhonePortraitTablet"); else if (idxPhoneSize == 2) phoneWidget.setStylePrimaryName("ode-SimpleMockFormPhonePortraitMonitor"); navigationBar.setStylePrimaryName("ode-SimpleMockFormNavigationBarPortrait"); } } /* * Resizes the scrollPanel, responsivePanel, and formWidget based on the screen size. */ private void resizePanel(int newWidth, int newHeight){ screenWidth = newWidth; screenHeight = newHeight; if (landscape) { usableScreenWidth = screenWidth - navigationBar.getHeight(); usableScreenHeight = screenHeight - PhoneBar.HEIGHT - titleBar.getHeight(); } else { usableScreenWidth = screenWidth; usableScreenHeight = screenHeight - PhoneBar.HEIGHT - titleBar.getHeight() - navigationBar.getHeight(); } rootPanel.setPixelSize(usableScreenWidth, usableScreenHeight); scrollPanel.setPixelSize(usableScreenWidth + getVerticalScrollbarWidth(), usableScreenHeight); formWidget.setPixelSize(screenWidth + getVerticalScrollbarWidth(), screenHeight); // Store properties changeProperty(PROPERTY_NAME_WIDTH, "" + usableScreenWidth); boolean scrollable = Boolean.parseBoolean(getPropertyValue(PROPERTY_NAME_SCROLLABLE)); if (!scrollable) { changeProperty(PROPERTY_NAME_HEIGHT, "" + usableScreenHeight); } } /* * Returns the width of a vertical scroll bar, calculating it if necessary. */ private static int getVerticalScrollbarWidth() { // We only calculate the vertical scroll bar width once, then we store it in the static field // verticalScrollbarWidth. If the field is non-zero, we don't need to calculate it again. if (verticalScrollbarWidth == 0) { // The following code will calculate (on the fly) the width of a vertical scroll bar. // We'll create two divs, one inside the other and add the outer div to the document body, // but off-screen where the user won't see it. // We'll measure the width of the inner div twice: (first) when the outer div's vertical // scrollbar is hidden and (second) when the outer div's vertical scrollbar is visible. // The width of inner div will be smaller when outer div's vertical scrollbar is visible. // By subtracting the two measurements, we can calculate the width of the vertical scrollbar. // I used code from the following websites as reference material: Document document = Document.get(); // Create an outer div. DivElement outerDiv = document.createDivElement(); Style outerDivStyle = outerDiv.getStyle(); // Use absolute positioning and set the top/left so that it is off-screen. // We don't want the user to see anything while we do this calculation. 
outerDivStyle.setProperty("position", "absolute"); outerDivStyle.setProperty("top", "-1000px"); outerDivStyle.setProperty("left", "-1000px"); // Set the width and height of the outer div to a fixed size in pixels. outerDivStyle.setProperty("width", "100px"); outerDivStyle.setProperty("height", "50px"); // Hide the outer div's scrollbar by setting the "overflow" property to "hidden". outerDivStyle.setProperty("overflow", "hidden"); // Create an inner div and put it inside the outer div. DivElement innerDiv = document.createDivElement(); Style innerDivStyle = innerDiv.getStyle(); // Set the height of the inner div to be 4 times the height of the outer div so that a // vertical scrollbar will be necessary (but hidden for now) on the outer div. innerDivStyle.setProperty("height", "200px"); outerDiv.appendChild(innerDiv); // Temporarily add the outer div to the document body. It's off-screen so the user won't // actually see anything. Element bodyElement = document.getElementsByTagName("body").getItem(0); bodyElement.appendChild(outerDiv); // Get the width of the inner div while the outer div's vertical scrollbar is hidden. int widthWithoutScrollbar = innerDiv.getOffsetWidth(); // Show the outer div's vertical scrollbar by setting the "overflow" property to "auto". outerDivStyle.setProperty("overflow", "auto"); // Now, get the width of the inner div while the vertical scrollbar is visible. int widthWithScrollbar = innerDiv.getOffsetWidth(); // Remove the outer div from the document body. bodyElement.removeChild(outerDiv); // Calculate the width of the vertical scrollbar by subtracting the two widths. verticalScrollbarWidth = widthWithoutScrollbar - widthWithScrollbar; } return verticalScrollbarWidth; } @Override public final MockForm getForm() { return this; } @Override public boolean isForm() { return true; } @Override public String getVisibleTypeName() { return VISIBLE_TYPE; } @Override protected void addWidthHeightProperties() { addProperty(PROPERTY_NAME_WIDTH, "" + PORTRAIT_WIDTH, null, new YoungAndroidLengthPropertyEditor()); addProperty(PROPERTY_NAME_HEIGHT, "" + LENGTH_PREFERRED, null, new YoungAndroidLengthPropertyEditor()); } @Override public boolean isPropertyPersisted(String propertyName) { // We use the Width and Height properties to make the form appear correctly in the designer, // but they aren't actually persisted to the .scm file. if (propertyName.equals(PROPERTY_NAME_WIDTH) || propertyName.equals(PROPERTY_NAME_HEIGHT)) { return false; } return super.isPropertyPersisted(propertyName); } @Override protected boolean isPropertyVisible(String propertyName) { switch (propertyName) { case PROPERTY_NAME_WIDTH: case PROPERTY_NAME_HEIGHT: case PROPERTY_NAME_ACTIONBAR: { return false; } // The Icon property actually applies to the application and is only visible on Screen1. case PROPERTY_NAME_ICON: // The VersionName property actually applies to the application and is only visible on Screen1. case PROPERTY_NAME_VNAME: // The VersionCode property actually applies to the application and is only visible on Screen1. case PROPERTY_NAME_VCODE: // The Sizing property actually applies to the application and is only visible on Screen1. case PROPERTY_NAME_SIZING: // The AppName property actually applies to the application and is only visible on Screen1. case PROPERTY_NAME_ANAME: // The ShowListsAsJson property actually applies to the application and is only visible on Screen1. 
case PROPERTY_NAME_SHOW_LISTS_AS_JSON: // The TutorialURL property actually applies to the application and is only visible on Screen1. case PROPERTY_NAME_TUTORIAL_URL: case PROPERTY_NAME_BLOCK_SUBSET: case PROPERTY_NAME_PRIMARY_COLOR: case PROPERTY_NAME_PRIMARY_COLOR_DARK: case PROPERTY_NAME_ACCENT_COLOR: case PROPERTY_NAME_THEME: { return editor.isScreen1(); } default: { return super.isPropertyVisible(propertyName); } } } /* * Sets the form's BackgroundColor property to a new value. */ private void setBackgroundColorProperty(String text) { if (MockComponentsUtil.isNoneColor(text)) { text = "&HFF000000"; // black } else if (MockComponentsUtil.isDefaultColor(text)) { text = "&HFFFFFFFF"; // white } MockComponentsUtil.setWidgetBackgroundColor(rootPanel, text); } /* * Sets the form's BackgroundImage property to a new value. */ private void setBackgroundImageProperty(String text) { String url = convertImagePropertyValueToUrl(text); if (url == null) { // text was not recognized as an asset. url = ""; } MockComponentsUtil.setWidgetBackgroundImage(rootPanel, url); } private void setScreenOrientationProperty(String text) { if (hasProperty(PROPERTY_NAME_WIDTH) && hasProperty(PROPERTY_NAME_HEIGHT) && hasProperty(PROPERTY_NAME_SCROLLABLE)) { if (text.equalsIgnoreCase("landscape")) { screenWidth = LANDSCAPE_WIDTH; screenHeight = LANDSCAPE_HEIGHT; landscape = true; } else { screenWidth = PORTRAIT_WIDTH; screenHeight = PORTRAIT_HEIGHT; landscape = false; } setPhoneStyle(); if (landscape) { usableScreenWidth = screenWidth - navigationBar.getHeight(); usableScreenHeight = screenHeight - PhoneBar.HEIGHT - titleBar.getHeight(); } else { usableScreenWidth = screenWidth; usableScreenHeight = screenHeight - PhoneBar.HEIGHT - titleBar.getHeight() - navigationBar.getHeight(); } resizePanel(screenWidth, screenHeight); changeProperty(PROPERTY_NAME_WIDTH, "" + usableScreenWidth); boolean scrollable = Boolean.parseBoolean(getPropertyValue(PROPERTY_NAME_SCROLLABLE)); if (!scrollable) { changeProperty(PROPERTY_NAME_HEIGHT, "" + usableScreenHeight); } } } private void setScrollableProperty(String text) { if (hasProperty(PROPERTY_NAME_HEIGHT)) { final boolean scrollable = Boolean.parseBoolean(text); int heightHint = scrollable ? LENGTH_PREFERRED : usableScreenHeight; changeProperty(PROPERTY_NAME_HEIGHT, "" + heightHint); } } private void setIconProperty(String icon) { // The Icon property actually applies to the application and is only visible on Screen1. // When we load a form that is not Screen1, this method will be called with the default value // for icon (empty string). We need to ignore that. if (editor.isScreen1()) { editor.getProjectEditor().changeProjectSettingsProperty( SettingsConstants.PROJECT_YOUNG_ANDROID_SETTINGS, SettingsConstants.YOUNG_ANDROID_SETTINGS_ICON, icon); } } private void setVCodeProperty(String vcode) { // The VersionCode property actually applies to the application and is only visible on Screen1. // When we load a form that is not Screen1, this method will be called with the default value // for VersionCode (1). We need to ignore that. if (editor.isScreen1()) { editor.getProjectEditor().changeProjectSettingsProperty( SettingsConstants.PROJECT_YOUNG_ANDROID_SETTINGS, SettingsConstants.YOUNG_ANDROID_SETTINGS_VERSION_CODE, vcode); } } private void setVNameProperty(String vname) { // The VersionName property actually applies to the application and is only visible on Screen1. // When we load a form that is not Screen1, this method will be called with the default value // for VersionName (1.0). 
We need to ignore that. if (editor.isScreen1()) { editor.getProjectEditor().changeProjectSettingsProperty( SettingsConstants.PROJECT_YOUNG_ANDROID_SETTINGS, SettingsConstants.YOUNG_ANDROID_SETTINGS_VERSION_NAME, vname); } } private void setSizingProperty(String sizingProperty) { // The Compatibility property actually applies to the application and is only visible on // Screen1. When we load a form that is not Screen1, this method will be called with the // default value for CompatibilityProperty (false). We need to ignore that. if (editor.isScreen1()) { editor.getProjectEditor().changeProjectSettingsProperty( SettingsConstants.PROJECT_YOUNG_ANDROID_SETTINGS, SettingsConstants.YOUNG_ANDROID_SETTINGS_SIZING, sizingProperty); } } private void setShowListsAsJsonProperty(String asJson) { // This property actually applies to the application and is only visible on // Screen1. When we load a form that is not Screen1, this method will be called with the // default value for ShowListsAsJsonProperty (false). We need to ignore that. if (editor.isScreen1()) { editor.getProjectEditor().changeProjectSettingsProperty( SettingsConstants.PROJECT_YOUNG_ANDROID_SETTINGS, SettingsConstants.YOUNG_ANDROID_SETTINGS_SHOW_LISTS_AS_JSON, asJson); } } private void setTutorialURLProperty(String asJson) { // This property actually applies to the application and is only visible on // Screen1. When we load a form that is not Screen1, this method will be called with the // default value for TutorialURL (""). We need to ignore that. if (editor.isScreen1()) { editor.getProjectEditor().changeProjectSettingsProperty( SettingsConstants.PROJECT_YOUNG_ANDROID_SETTINGS, SettingsConstants.YOUNG_ANDROID_SETTINGS_TUTORIAL_URL, asJson); } } private void setBlockSubsetProperty(String asJson) { //This property applies to the application and is only visible on Screen1. When we load a form that is //not Screen1, this method will be called with the default value for SubsetJson (""). We need to ignore that. if (editor.isScreen1()) { editor.getProjectEditor().changeProjectSettingsProperty( SettingsConstants.PROJECT_YOUNG_ANDROID_SETTINGS, SettingsConstants.YOUNG_ANDROID_SETTINGS_BLOCK_SUBSET, asJson); if (editor.isLoadComplete()) { ((YaFormEditor)editor).reloadComponentPalette(asJson); } } } private void setANameProperty(String aname) { // The AppName property actually applies to the application and is only visible on Screen1. // When we load a form that is not Screen1, this method will be called with the default value if (editor.isScreen1()) { editor.getProjectEditor().changeProjectSettingsProperty( SettingsConstants.PROJECT_YOUNG_ANDROID_SETTINGS, SettingsConstants.YOUNG_ANDROID_SETTINGS_APP_NAME, aname); } } private void setTitleVisibleProperty(String text) { boolean visible = Boolean.parseBoolean(text); titleBar.setVisible(visible); } private void setActionBarProperty(String actionBar) { if (editor.isScreen1()) { editor.getProjectEditor().changeProjectSettingsProperty( SettingsConstants.PROJECT_YOUNG_ANDROID_SETTINGS, SettingsConstants.YOUNG_ANDROID_SETTINGS_ACTIONBAR, actionBar); } titleBar.setActionBar(Boolean.parseBoolean(actionBar)); if (initialized) { resizePanel(screenWidth, screenHeight); // update screen due to titlebar size change. 
} } private void setPrimaryColor(String color) { if (editor.isScreen1()) { editor.getProjectEditor().changeProjectSettingsProperty( SettingsConstants.PROJECT_YOUNG_ANDROID_SETTINGS, SettingsConstants.YOUNG_ANDROID_SETTINGS_PRIMARY_COLOR, color); } if (color.equals("&H00000000")) { // Replace Default with actual default color color = ComponentConstants.DEFAULT_PRIMARY_COLOR; } titleBar.setBackgroundColor(color); } private void setPrimaryColorDark(String color) { if (editor.isScreen1()) { editor.getProjectEditor().changeProjectSettingsProperty( SettingsConstants.PROJECT_YOUNG_ANDROID_SETTINGS, SettingsConstants.YOUNG_ANDROID_SETTINGS_PRIMARY_COLOR_DARK, color); } } private void setAccentColor(String color) { if (editor.isScreen1()) { editor.getProjectEditor().changeProjectSettingsProperty( SettingsConstants.PROJECT_YOUNG_ANDROID_SETTINGS, SettingsConstants.YOUNG_ANDROID_SETTINGS_ACCENT_COLOR, color); } } private void setTheme(String theme) { if (editor.isScreen1()) { editor.getProjectEditor().changeProjectSettingsProperty( SettingsConstants.PROJECT_YOUNG_ANDROID_SETTINGS, SettingsConstants.YOUNG_ANDROID_SETTINGS_THEME, theme); } if (theme.equals("AppTheme.Light")) { final String newColor = "&HFF000000"; MockComponentsUtil.setWidgetTextColor(titleBar.bar, newColor); MockComponentsUtil.setWidgetTextColor(titleBar.menuButton, newColor); MockComponentsUtil.setWidgetTextColor(titleBar.title, newColor); } else { final String newColor = "&HFFFFFFFF"; MockComponentsUtil.setWidgetTextColor(titleBar.bar, newColor); MockComponentsUtil.setWidgetTextColor(titleBar.menuButton, newColor); MockComponentsUtil.setWidgetTextColor(titleBar.title, newColor); } if (theme.equals("AppTheme")) { formWidget.addStyleDependentName("Dark"); } else { formWidget.removeStyleDependentName("Dark"); } } /** * Forces a re-layout of the child components of the container. * * Each components onPropertyChange listener calls us. This is * reasonable during interactive editing because we have to make * sure the screen reflects what the user is doing. However during * project load we will be called many times, when really we should * only be called after the project's UI is really finished loading. * * We could add a bunch of complicated code to inhibit refreshes * until we know the project's UI is loaded and stable. However that * is a change that will be spread over several modules, making it * hard to understand what is going on. * * Instead, I am opting to keep this change self contained within * this module. The idea is to see how quickly we are being * called. If we receive a call which is close in time (within * seconds) of a previous call, we set a timer to fire in the * reasonable future (say 2 seconds). While this timer is counting * down, we ignore any other calls to refresh. Whatever refreshing * they would do will be handled by the call done when the timer * fires. This approach does not reduce the number of calls to * refresh during project loading to 1. But it significantly reduces * the number of calls and gets us out of the exponential explosion * in time and memory that we see with projects with hundreds of * design elements (yes, people do that, and I have seen at least * one project that was this big and reasonable!). -Jeff Schiller * (jis@mit.edu). * */ private Timer refreshTimer = null; public final void refresh() { if (refreshTimer != null) return; refreshTimer = new Timer() { @Override public void run() { doRefresh(); refreshTimer = null; } }; refreshTimer.schedule(0); } /* * Do the actual refresh. 
* * This method is public because it is called directly from MockComponent for refreshes * which bypass throttling. * */ public final void doRefresh() { Map<MockComponent, LayoutInfo> layoutInfoMap = new HashMap<MockComponent, LayoutInfo>(); collectLayoutInfos(layoutInfoMap, this); LayoutInfo formLayoutInfo = layoutInfoMap.get(this); layout.layoutChildren(formLayoutInfo); rootPanel.setPixelSize(formLayoutInfo.width, Math.max(formLayoutInfo.height, usableScreenHeight)); for (LayoutInfo layoutInfo : layoutInfoMap.values()) { layoutInfo.cleanUp(); } layoutInfoMap.clear(); } /* * Collects the LayoutInfo of the given component and, recursively, all of * its children. * * If a component's width/height hint is automatic, the corresponding * LayoutInfo's width/height will be set to the calculated width/height. * If a component's width/height hint is fill parent, the corresponding * LayoutInfo's width/height may be set to fill parent. This will be resolved * when layoutChildren is called. */ private static void collectLayoutInfos(Map<MockComponent, LayoutInfo> layoutInfoMap, MockComponent component) { LayoutInfo layoutInfo = component.createLayoutInfo(layoutInfoMap); // If this component is a container, collect the LayoutInfos of its children. if (component instanceof MockContainer) { if (!layoutInfo.visibleChildren.isEmpty()) { // We resize the container to be very large so that we get accurate // results when we ask for a child's size using getOffsetWidth/getOffsetHeight. // If the container is its normal size (or perhaps the default empty // size), then the browser won't give us anything bigger than that // when we ask for a child's size. if (component.isForm()) { ((MockForm) component).rootPanel.setPixelSize(1000, 1000); } else { component.setPixelSize(1000, 1000); } // Show children that should be shown and collect their layoutInfos. // Note that some MockLayout implementations may hide children that are in the // visibleChildren list. For example, in MockTableLayout, if two or more children occupy // the same cell in the table, all but one of the children are hidden. for (MockComponent child : layoutInfo.visibleChildren) { child.setVisible(true); collectLayoutInfos(layoutInfoMap, child); } } // Hide children that should be hidden. for (MockComponent child : component.getHiddenVisibleChildren()) { child.setVisible(false); } } layoutInfo.gatherDimensions(); } /** * Adds an {@link FormChangeListener} to the listener set if it isn't already in there. * * @param listener the {@code FormChangeListener} to be added */ public void addFormChangeListener(FormChangeListener listener) { formChangeListeners.add(listener); } /** * Removes an {@link FormChangeListener} from the listener list. * * @param listener the {@code FormChangeListener} to be removed */ public void removeFormChangeListener(FormChangeListener listener) { formChangeListeners.remove(listener); } /** * Triggers a component property change event to be sent to the listener on the listener list. */ protected void fireComponentPropertyChanged(MockComponent component, String propertyName, String propertyValue) { for (FormChangeListener listener : formChangeListeners) { listener.onComponentPropertyChanged(component, propertyName, propertyValue); } } /** * Triggers a component removed event to be sent to the listener on the listener list. 
*/ protected void fireComponentRemoved(MockComponent component, boolean permanentlyDeleted) { for (FormChangeListener listener : formChangeListeners) { listener.onComponentRemoved(component, permanentlyDeleted); } } /** * Triggers a component added event to be sent to the listener on the listener list. */ protected void fireComponentAdded(MockComponent component) { for (FormChangeListener listener : formChangeListeners) { listener.onComponentAdded(component); } } /** * Triggers a component renamed event to be sent to the listener on the listener list. */ protected void fireComponentRenamed(MockComponent component, String oldName) { for (FormChangeListener listener : formChangeListeners) { listener.onComponentRenamed(component, oldName); } } /** * Triggers a component selection change event to be sent to the listener on the listener list. */ protected void fireComponentSelectionChange(MockComponent component, boolean selected) { for (FormChangeListener listener : formChangeListeners) { listener.onComponentSelectionChange(component, selected); } } /** * Changes the component that is currently selected in the form. * <p> * There will always be exactly one component selected in a form * at any given time. */ public final void setSelectedComponent(MockComponent newSelectedComponent) { MockComponent oldSelectedComponent = selectedComponent; if (newSelectedComponent == null) { throw new IllegalArgumentException("at least one component must always be selected"); } YaFormEditor formEditor = (YaFormEditor) editor; boolean shouldSelectMultipleComponents = formEditor.getShouldSelectMultipleComponents(); List<MockComponent> selectedComponents = formEditor.getSelectedComponents(); if (selectedComponents.size() == 1 && selectedComponents.contains(newSelectedComponent)) { // Attempting to change the selection from old to new when they are the same breaks return; } if (shouldSelectMultipleComponents && selectedComponents.size() > 1 && formEditor.isSelectedComponent(newSelectedComponent)) { int index = selectedComponents.indexOf(newSelectedComponent); selectedComponent = selectedComponents.get((index == 0) ? 1 : index - 1); newSelectedComponent.onSelectedChange(false); return; } selectedComponent = newSelectedComponent; Map<String, MockComponent> componentsMap = formEditor.getComponents(); if (oldSelectedComponent != null && !shouldSelectMultipleComponents) { // Can be null initially for (MockComponent component : componentsMap.values()) { if (component.getName() != selectedComponent.getName()) { component.onSelectedChange(false); } } } newSelectedComponent.onSelectedChange(true); } public final MockComponent getSelectedComponent() { return selectedComponent; } /** * Builds a tree of the component hierarchy of the form for display in the * {@code SourceStructureExplorer}. 
* * @return tree showing the component hierarchy of the form */ public TreeItem buildComponentsTree() { return buildTree(); } // PropertyChangeListener implementation @Override public void onPropertyChange(String propertyName, String newValue) { super.onPropertyChange(propertyName, newValue); // Apply changed properties to the mock component if (propertyName.equals(PROPERTY_NAME_BACKGROUNDCOLOR)) { setBackgroundColorProperty(newValue); } else if (propertyName.equals(PROPERTY_NAME_BACKGROUNDIMAGE)) { setBackgroundImageProperty(newValue); } else if (propertyName.equals(PROPERTY_NAME_SCREEN_ORIENTATION)) { setScreenOrientationProperty(newValue); } else if (propertyName.equals(PROPERTY_NAME_SCROLLABLE)) { setScrollableProperty(newValue); adjustAlignmentDropdowns(); } else if (propertyName.equals(PROPERTY_NAME_TITLE)) { titleBar.changeTitle(newValue); } else if (propertyName.equals(PROPERTY_NAME_SIZING)) { if (newValue.equals("Fixed")){ // Disable Tablet Preview editor.getVisibleComponentsPanel().enableTabletPreviewCheckBox(false); } else { editor.getVisibleComponentsPanel().enableTabletPreviewCheckBox(true); } setSizingProperty(newValue); } else if (propertyName.equals(PROPERTY_NAME_ICON)) { setIconProperty(newValue); } else if (propertyName.equals(PROPERTY_NAME_VCODE)) { setVCodeProperty(newValue); } else if (propertyName.equals(PROPERTY_NAME_VNAME)) { setVNameProperty(newValue); } else if (propertyName.equals(PROPERTY_NAME_ANAME)) { setANameProperty(newValue); } else if (propertyName.equals(PROPERTY_NAME_SHOW_LISTS_AS_JSON)) { setShowListsAsJsonProperty(newValue); } else if (propertyName.equals(PROPERTY_NAME_TUTORIAL_URL)) { setTutorialURLProperty(newValue); } else if (propertyName.equals(PROPERTY_NAME_BLOCK_SUBSET)) { setBlockSubsetProperty(newValue); } else if (propertyName.equals(PROPERTY_NAME_ACTIONBAR)) { setActionBarProperty(newValue); } else if (propertyName.equals(PROPERTY_NAME_THEME)) { setTheme(newValue); if ("Classic".equals(newValue)) { getProperties().getExistingProperty(PROPERTY_NAME_ACTIONBAR).setValue("False"); } else { getProperties().getExistingProperty(PROPERTY_NAME_ACTIONBAR).setValue("True"); } } else if (propertyName.equals(PROPERTY_NAME_PRIMARY_COLOR)) { setPrimaryColor(newValue); } else if (propertyName.equals(PROPERTY_NAME_PRIMARY_COLOR_DARK)) { setPrimaryColorDark(newValue); } else if (propertyName.equals(PROPERTY_NAME_ACCENT_COLOR)) { setAccentColor(newValue); } else if (propertyName.equals(PROPERTY_NAME_HORIZONTAL_ALIGNMENT)) { myLayout.setHAlignmentFlags(newValue); refreshForm(); } else if (propertyName.equals(PROPERTY_NAME_VERTICAL_ALIGNMENT)) { myLayout.setVAlignmentFlags(newValue); refreshForm(); } else if (propertyName.equals(PROPERTY_NAME_TITLEVISIBLE)) { setTitleVisibleProperty(newValue); refreshForm(); } } // enableAndDisable It should not be called until the component is initialized. // Otherwise, we'll get NPEs in trying to use myAlignmentPropertyEditor. private void adjustAlignmentDropdowns() { if (initialized) enableAndDisableDropdowns(); } // Don't forget to call this on initialization!!! // If scrollable is True, the selector for vertical alignment should be disabled. 
private void enableAndDisableDropdowns() { String scrollable = properties.getProperty(PROPERTY_NAME_SCROLLABLE).getValue(); if (scrollable.equals("True")) { myVAlignmentPropertyEditor.disable(); } else { myVAlignmentPropertyEditor.enable(); } } @Override public EditableProperties getProperties() { // Before we return the Properties object, we make sure that the // Sizing, ShowListsAsJson and TutorialURL properties have the // value from the project's properties this is because these are // per project, not per Screen(Form) We only have to do this on // screens other then screen1 because screen1's value is // definitive. if (!editor.isScreen1()) { properties.changePropertyValue(SettingsConstants.YOUNG_ANDROID_SETTINGS_SIZING, editor.getProjectEditor().getProjectSettingsProperty( SettingsConstants.PROJECT_YOUNG_ANDROID_SETTINGS, SettingsConstants.YOUNG_ANDROID_SETTINGS_SIZING)); properties.changePropertyValue(SettingsConstants.YOUNG_ANDROID_SETTINGS_SHOW_LISTS_AS_JSON, editor.getProjectEditor().getProjectSettingsProperty( SettingsConstants.PROJECT_YOUNG_ANDROID_SETTINGS, SettingsConstants.YOUNG_ANDROID_SETTINGS_SHOW_LISTS_AS_JSON)); properties.changePropertyValue(SettingsConstants.YOUNG_ANDROID_SETTINGS_TUTORIAL_URL, editor.getProjectEditor().getProjectSettingsProperty( SettingsConstants.PROJECT_YOUNG_ANDROID_SETTINGS, SettingsConstants.YOUNG_ANDROID_SETTINGS_TUTORIAL_URL)); properties.changePropertyValue(SettingsConstants.YOUNG_ANDROID_SETTINGS_BLOCK_SUBSET, editor.getProjectEditor().getProjectSettingsProperty( SettingsConstants.PROJECT_YOUNG_ANDROID_SETTINGS, SettingsConstants.YOUNG_ANDROID_SETTINGS_BLOCK_SUBSET)); properties.changePropertyValue(SettingsConstants.YOUNG_ANDROID_SETTINGS_ACTIONBAR, editor.getProjectEditor().getProjectSettingsProperty( SettingsConstants.PROJECT_YOUNG_ANDROID_SETTINGS, SettingsConstants.YOUNG_ANDROID_SETTINGS_ACTIONBAR)); properties.changePropertyValue(SettingsConstants.YOUNG_ANDROID_SETTINGS_THEME, editor.getProjectEditor().getProjectSettingsProperty( SettingsConstants.PROJECT_YOUNG_ANDROID_SETTINGS, SettingsConstants.YOUNG_ANDROID_SETTINGS_THEME)); properties.changePropertyValue(SettingsConstants.YOUNG_ANDROID_SETTINGS_PRIMARY_COLOR, editor.getProjectEditor().getProjectSettingsProperty( SettingsConstants.PROJECT_YOUNG_ANDROID_SETTINGS, SettingsConstants.YOUNG_ANDROID_SETTINGS_PRIMARY_COLOR)); properties.changePropertyValue(SettingsConstants.YOUNG_ANDROID_SETTINGS_PRIMARY_COLOR_DARK, editor.getProjectEditor().getProjectSettingsProperty( SettingsConstants.PROJECT_YOUNG_ANDROID_SETTINGS, SettingsConstants.YOUNG_ANDROID_SETTINGS_PRIMARY_COLOR_DARK)); properties.changePropertyValue(SettingsConstants.YOUNG_ANDROID_SETTINGS_ACCENT_COLOR, editor.getProjectEditor().getProjectSettingsProperty( SettingsConstants.PROJECT_YOUNG_ANDROID_SETTINGS, SettingsConstants.YOUNG_ANDROID_SETTINGS_ACCENT_COLOR)); } return properties; } }
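/*
 * Illustrative sketch (not part of the original MockForm sources): one way a client
 * could observe the MockForm above through its addFormChangeListener(...) hook,
 * logging with the OdeLog utility MockForm itself uses. It assumes FormChangeListener
 * declares exactly the five callbacks dispatched by the fire* methods in MockForm and
 * that this class sits in the same package; LoggingFormChangeListener is hypothetical.
 */
class LoggingFormChangeListener implements FormChangeListener {
  @Override
  public void onComponentPropertyChanged(MockComponent component, String propertyName,
      String propertyValue) {
    OdeLog.log(component.getName() + "." + propertyName + " changed to " + propertyValue);
  }

  @Override
  public void onComponentRemoved(MockComponent component, boolean permanentlyDeleted) {
    OdeLog.log(component.getName() + (permanentlyDeleted ? " deleted" : " removed"));
  }

  @Override
  public void onComponentAdded(MockComponent component) {
    OdeLog.log(component.getName() + " added");
  }

  @Override
  public void onComponentRenamed(MockComponent component, String oldName) {
    OdeLog.log(oldName + " renamed to " + component.getName());
  }

  @Override
  public void onComponentSelectionChange(MockComponent component, boolean selected) {
    OdeLog.log(component.getName() + (selected ? " selected" : " deselected"));
  }
}
// Hypothetical usage from editor code: form.addFormChangeListener(new LoggingFormChangeListener());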
package org.jnosql.artemis.column;

import org.jnosql.artemis.ConfigurationReader;
import org.jnosql.artemis.ConfigurationSettingsUnit;
import org.jnosql.artemis.ConfigurationUnit;
import org.jnosql.artemis.reflection.Reflections;
import org.jnosql.diana.api.column.ColumnConfiguration;
import org.jnosql.diana.api.column.ColumnConfigurationAsync;
import org.jnosql.diana.api.column.ColumnFamilyManager;
import org.jnosql.diana.api.column.ColumnFamilyManagerAsync;
import org.jnosql.diana.api.column.ColumnFamilyManagerAsyncFactory;
import org.jnosql.diana.api.column.ColumnFamilyManagerFactory;

import javax.enterprise.context.ApplicationScoped;
import javax.enterprise.inject.Instance;
import javax.enterprise.inject.Produces;
import javax.enterprise.inject.spi.Annotated;
import javax.enterprise.inject.spi.InjectionPoint;
import javax.inject.Inject;

/**
 * The class that creates {@link ColumnFamilyManagerFactory} and {@link ColumnFamilyManagerAsyncFactory}
 * from the {@link ConfigurationUnit}
 */
@ApplicationScoped
class ColumnFamilyManagerFactoryProducer {

    @Inject
    private Reflections reflections;

    @Inject
    private Instance<ConfigurationReader> configurationReader;

    @ConfigurationUnit
    @Produces
    public <T extends ColumnFamilyManager> ColumnFamilyManagerFactory<T> getColumnConfigurationGenerics(InjectionPoint injectionPoint) {
        return getColumnFamilyManagerFactory(injectionPoint);
    }

    @ConfigurationUnit
    @Produces
    public ColumnFamilyManagerFactory getColumnConfiguration(InjectionPoint injectionPoint) {
        return getColumnFamilyManagerFactory(injectionPoint);
    }

    @ConfigurationUnit
    @Produces
    public <T extends ColumnFamilyManagerAsync> ColumnFamilyManagerAsyncFactory<T> getColumnConfigurationAsyncGeneric(InjectionPoint injectionPoint) {
        return getColumnFamilyManagerAsyncFactory(injectionPoint);
    }

    @ConfigurationUnit
    @Produces
    public ColumnFamilyManagerAsyncFactory getColumnConfigurationAsync(InjectionPoint injectionPoint) {
        return getColumnFamilyManagerAsyncFactory(injectionPoint);
    }

    private <T extends ColumnFamilyManagerAsync> ColumnFamilyManagerAsyncFactory<T> getColumnFamilyManagerAsyncFactory(InjectionPoint injectionPoint) {
        Annotated annotated = injectionPoint.getAnnotated();
        ConfigurationUnit annotation = annotated.getAnnotation(ConfigurationUnit.class);
        ConfigurationSettingsUnit unit = configurationReader.get().read(annotation, ColumnConfigurationAsync.class);
        Class<ColumnConfigurationAsync> configurationClass = unit.<ColumnConfigurationAsync>getProvider()
                .orElseThrow(() -> new IllegalStateException("The ColumnConfigurationAsync provider is required in the configuration"));
        ColumnConfigurationAsync columnConfiguration = reflections.newInstance(configurationClass);
        ColumnFamilyManagerAsyncFactory columnFamilyManagerFactory = columnConfiguration.getAsync(unit.getSettings());
        return columnFamilyManagerFactory;
    }

    private <T extends ColumnFamilyManager> ColumnFamilyManagerFactory<T> getColumnFamilyManagerFactory(InjectionPoint injectionPoint) {
        Annotated annotated = injectionPoint.getAnnotated();
        ConfigurationUnit annotation = annotated.getAnnotation(ConfigurationUnit.class);
        ConfigurationSettingsUnit unit = configurationReader.get().read(annotation, ColumnConfiguration.class);
        Class<ColumnConfiguration> configurationClass = unit.<ColumnConfiguration>getProvider()
                .orElseThrow(() -> new IllegalStateException("The ColumnConfiguration provider is required in the configuration"));
        ColumnConfiguration columnConfiguration = reflections.newInstance(configurationClass);
        ColumnFamilyManagerFactory
columnFamilyManagerFactory = columnConfiguration.get(unit.getSettings()); return columnFamilyManagerFactory; } }
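/*
 * Illustrative sketch (not part of the original sources): one way a CDI bean might
 * consume the producers above. The unit name "column-unit" and the file "column.json"
 * are hypothetical; @ConfigurationUnit is assumed to expose fileName/name members that
 * the injected ConfigurationReader resolves, and ColumnFamilyManagerFactory is assumed
 * to expose get(String database) as in the Diana column API. The raw-typed injection
 * mirrors the non-generic getColumnConfiguration producer.
 */
import org.jnosql.artemis.ConfigurationUnit;
import org.jnosql.diana.api.column.ColumnFamilyManager;
import org.jnosql.diana.api.column.ColumnFamilyManagerFactory;

import javax.enterprise.context.ApplicationScoped;
import javax.inject.Inject;

@ApplicationScoped
class ColumnManagerBootstrap {

    // Raw injection resolved by the getColumnConfiguration producer above.
    @Inject
    @ConfigurationUnit(fileName = "column.json", name = "column-unit")
    private ColumnFamilyManagerFactory factory;

    public ColumnFamilyManager managerFor(String database) {
        // Obtains a manager bound to the given column family database/keyspace.
        return factory.get(database);
    }
}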
package org.intermine.bio.dataconversion;

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import org.apache.log4j.Logger;
import org.intermine.bio.dataconversion.ChadoSequenceProcessor.FeatureData;
import org.intermine.objectstore.ObjectStoreException;
import org.intermine.xml.full.Item;
import org.intermine.xml.full.Reference;
import org.intermine.xml.full.ReferenceList;

/**
 * Create items from the modENCODE metadata extensions to the chado schema.
 * @author Kim Rutherford
 */
public class ModEncodeMetaDataProcessor extends ChadoProcessor
{
    private static final Logger LOG = Logger.getLogger(ModEncodeMetaDataProcessor.class);

    // maps to link chado identifiers with intermineObjectId (Integer, Integer)
    // and chado identifiers with item identifiers (Integer, String)
    private Map<Integer, Integer> protocolIdMap = new HashMap<Integer, Integer>();
    private Map<Integer, String> protocolIdRefMap = new HashMap<Integer, String>();
    private Map<Integer, Integer> providerIdMap = new HashMap<Integer, Integer>();
    private Map<Integer, String> providerIdRefMap = new HashMap<Integer, String>();
    private Map<Integer, Integer> appliedProtocolIdMap = new HashMap<Integer, Integer>();
    private Map<Integer, String> appliedProtocolIdRefMap = new HashMap<Integer, String>();

    // maps from chado identifier to specific objects
    private Map<Integer, ExperimentSubmissionDetails> experimentMap =
        new HashMap<Integer, ExperimentSubmissionDetails>();
    private Map<Integer, AppliedProtocol> appliedProtocolMap = new HashMap<Integer, AppliedProtocol>();
    private Map<Integer, AppliedData> appliedDataMap = new HashMap<Integer, AppliedData>();

    // list of firstAppliedProtocols, first level of the DAG linking
    // the applied protocols through the data (and giving the flow
    // of data)
    private List<Integer> firstAppliedProtocols = new ArrayList<Integer>();

    // maps of the initial input data and final output data for an experiment
    private Map<Integer, List<Integer>> inDataMap = new HashMap<Integer, List<Integer>>();
    private Map<Integer, List<Integer>> outDataMap = new HashMap<Integer, List<Integer>>();

    // map used to store all data relative to an experiment
    private Map<Integer, List<Integer>> experimentDataMap = new HashMap<Integer, List<Integer>>();

    // just for debugging
    private Map<String, String> debugMap = new HashMap<String, String>(); // itemIdentifier, type

    private static final String PREFIX = "http://www.flymine.org/model/genomic#";

    private static class ExperimentSubmissionDetails
    {
        // the identifier assigned to Item eg. "0_23"
        private String itemIdentifier;
        // the object id of the stored Item
        private Integer interMineObjectId;
        // the identifier assigned to Provider Item for this object
        private String providerItemIdentifier;
    }

    /**
     * Data to reconstruct the flow of submission data
     */
    private static class AppliedProtocol
    {
        private Integer experimentId;    // chado
        private Integer protocolId;
        private String itemIdentifier;   // e.g. "0_12"
        private Integer intermineObjectId;
        private Integer levelDag;        // not used
        // the output data associated to this applied protocol
        private List<Integer> outputData = new ArrayList<Integer>();
        private List<Integer> inputData = new ArrayList<Integer>();
    }

    /**
     * Data to reconstruct the flow of submission data
     */
    private static class AppliedData
    {
        private String itemIdentifier;
        private Integer intermineObjectId;
        // the list of applied protocols for which this data item is an input
        private List<Integer> nextAppliedProtocols = new ArrayList<Integer>();
        private List<Integer> previousAppliedProtocols = new ArrayList<Integer>();
        private Integer appliedProtocolDataId;
        private Integer dataId;
    }

    /**
     * Create a new ChadoModuleProcessor object
     * @param chadoDBConverter the converter that created this Processor
     */
    public ModEncodeMetaDataProcessor(ChadoDBConverter chadoDBConverter) {
        super(chadoDBConverter);
    }

    /**
     * {@inheritDoc}
     * Note: TODO
     */
    @Override
    public void process(Connection connection) throws Exception {
        processProviderTable(connection);
        processProviderAttributes(connection);
        processExperimentTable(connection);
        processExperimentProps(connection);
        processProtocolTable(connection);
        processProtocolAttributes(connection);

        // process features and keep a map from chado feature_id to info
        Map<Integer, FeatureData> featureMap = processFeatures(connection, experimentMap);

        processAppliedProtocolTable(connection);
        processAppliedProtocolData(connection);
        processDataFeatureTable(connection, featureMap);
        processDag(connection);

        // set references
        setExperimentRefs(connection);
        setExperimentInputRefs(connection);
        setExperimentResultsRefs(connection);
        setExperimentProtocolsRefs(connection);
        setDAGRefs(connection);

        LOG.info("REF: SD size: initialData: " + inDataMap.get(32).size());
        LOG.info("REF: SD size: allData: " + experimentDataMap.get(32).size());
        LOG.info("REF: SD size: outData: " + outDataMap.get(32).size());
    }

    /**
     * Query for features that are referenced by the experiments in the experimentMap.
     *
     * @param experimentMap map from chado experiment_id to the experiment submission details
     */
    private Map<Integer, FeatureData> processFeatures(Connection connection,
            Map<Integer, ExperimentSubmissionDetails> experimentMap)
        throws Exception {
        Map<Integer, FeatureData> featureMap = new HashMap<Integer, FeatureData>();
        for (Map.Entry<Integer, ExperimentSubmissionDetails> entry: experimentMap.entrySet()) {
            Integer chadoExperimentId = entry.getKey();
            ExperimentSubmissionDetails experimentSubmissionDetails = entry.getValue();
            String experimentItemIdentifier = experimentSubmissionDetails.itemIdentifier;
            String providerItemIdentifier = experimentSubmissionDetails.providerItemIdentifier;
            ModEncodeFeatureProcessor processor =
                new ModEncodeFeatureProcessor(getChadoDBConverter(), experimentItemIdentifier,
                                              providerItemIdentifier, chadoExperimentId);
            processor.process(connection);
            featureMap.putAll(processor.getFeatureMap());
        }
        return featureMap;
    }

    /**
     * In chado, Applied protocols in a submission are linked to each other via
     * the flow of data (output of a parent AP are input to a child AP).
     * The method processes the data from chado to build the objects
     * (ExperimentSubmissionDetails, AppliedProtocol, AppliedData) and their
     * respective maps to chado identifiers needed to traverse the DAG.
* * @param connection * @throws SQLException * @throws ObjectStoreException */ private void processDag(Connection connection) throws SQLException, ObjectStoreException { ResultSet res = getDAGResultSet(connection); AppliedProtocol node = new AppliedProtocol(); AppliedData branch = null; Integer previousAppliedProtocolId = 0; while (res.next()) { Integer experimentId = new Integer(res.getInt("experiment_id")); Integer protocolId = new Integer(res.getInt("protocol_id")); Integer appliedProtocolId = new Integer(res.getInt("applied_protocol_id")); Integer appliedDataId = new Integer(res.getInt("applied_protocol_data_id")); Integer dataId = new Integer(res.getInt("data_id")); String direction = res.getString("direction"); Integer level = 0; // build a data node for each iteration if (appliedDataMap.containsKey(appliedDataId)) { branch = appliedDataMap.get(appliedDataId); } else { branch = new AppliedData(); } // could use > (order by apid, apdataid, direction) // NB: using isLast() is expensive if (!appliedProtocolId.equals(previousAppliedProtocolId) || res.isLast()) { // last one: fill the list (should be an output) if (res.isLast()) { if (direction.equalsIgnoreCase("output")) { node.outputData.add(appliedDataId); } } // if it is not the first iteration, let's store it if (previousAppliedProtocolId > 0) { appliedProtocolMap.put(previousAppliedProtocolId, node); } // new node AppliedProtocol newNode = new AppliedProtocol(); newNode.protocolId = protocolId; newNode.experimentId = experimentId; newNode.levelDag = level; // map all data to their experiment addToMap (experimentDataMap, experimentId, appliedDataId); // the experimentId != null for the first applied protocol if (experimentId > 0) { firstAppliedProtocols.add(appliedProtocolId); newNode.levelDag = 1; // not needed if (direction.startsWith("in")) { // .. the map of initial data for the experiment addToMap (inDataMap, experimentId, appliedDataId); } } if (direction.startsWith("in")) { // add this applied protocol to the list of nextAppliedProtocols branch.nextAppliedProtocols.add(appliedProtocolId); // ..and update the map if (appliedDataMap.containsKey(appliedDataId)) { appliedDataMap.remove(appliedDataId); } appliedDataMap.put(appliedDataId, branch); // .. and add the dataId to the list of input Data for this applied protocol newNode.inputData.add(appliedDataId); } else if (direction.startsWith("out")) { // add the dataId to the list of output Data for this applied protocol: // it will be used to link to the next set of applied protocols newNode.outputData.add(appliedDataId); } else { // in case there is some problem with the strings 'input' or 'output' throw new IllegalArgumentException("Data direction not valid for dataId: " + dataId + "|" + direction + "|"); } // for the new round.. 
node = newNode; previousAppliedProtocolId = appliedProtocolId; } else { // keep feeding IN et OUT if (direction.startsWith("in")) { node.inputData.add(appliedDataId); // it should be enough to use the input for filling the // experimentDataMap: apart from the initial ones, // the inputs are outputs of other levels addToMap (experimentDataMap, experimentId, appliedDataId); addToMap (inDataMap, experimentId, appliedDataId); // as above branch.nextAppliedProtocols.add(appliedProtocolId); if (!appliedDataMap.containsKey(appliedDataId)) { appliedDataMap.put(appliedDataId, branch); } else { appliedDataMap.remove(appliedDataId); appliedDataMap.put(appliedDataId, branch); } } else if (direction.startsWith("out")) { node.outputData.add(appliedDataId); } else { throw new IllegalArgumentException("Data direction not valid for dataId: " + dataId + "|" + direction + "|"); } } } LOG.info("created " + appliedProtocolMap.size() + " DAG nodes in map"); res.close(); // printMapAP (appliedProtocolMap); // printMapDATA (appliedDataMap); // now traverse the DAG, and associate experiment with all the applied protocols traverseDag(); } /** * method to add an element to a list which is the value of a map * @param m the map (<Integer, List<Integer>>) * @param key the key for the map * @param value the list */ private void addToMap(Map<Integer, List<Integer>> m, Integer key, Integer value) { List<Integer> ids = new ArrayList<Integer>(); if (m.containsKey(key)) { ids = m.get(key); } if (!ids.contains(value)) { ids.add(value); m.put(key, ids); } } /** * Return the rows needed to construct the DAG of the data/protocols. * The reference to the experiment is available only for the first set * of applied protocols, hence the outer join. * This is a protected method so that it can be overridden for testing * * @param connection the db connection * @return the SQL result set * @throws SQLException if a database problem occurs */ protected ResultSet getDAGResultSet(Connection connection) throws SQLException { String query = "SELECT eap.experiment_id, ap.protocol_id, apd.applied_protocol_id" + " , apd.data_id, apd.applied_protocol_data_id, apd.direction" + " FROM applied_protocol ap LEFT JOIN experiment_applied_protocol eap" + " ON (eap.first_applied_protocol_id = ap.applied_protocol_id )" + " , applied_protocol_data apd" + " WHERE apd.applied_protocol_id = ap.applied_protocol_id" + " ORDER By 3,5,6"; LOG.info("executing: " + query); Statement stmt = connection.createStatement(); ResultSet res = stmt.executeQuery(query); return res; } /** * Applies iteratively buildADaglevel * * @throws SQLException * @throws ObjectStoreException */ private void traverseDag() throws SQLException, ObjectStoreException { List<Integer> currentIterationAP = firstAppliedProtocols; List<Integer> nextIterationAP = new ArrayList<Integer>(); while (currentIterationAP.size() > 0) { nextIterationAP = buildADagLevel (currentIterationAP); currentIterationAP = nextIterationAP; //LOG.info("DB ITER: " + currentIterationAP.toString()); } } /** * This method is given a set of applied protocols (already associated with an experiment) * and produces the next set of applied protocols. The latter are the protocols attached to the * output data of the starting set (output data for a applied protocol is the input data for the * next one). 
* It also fills the map linking directly results ('leaf' output data) with experiment * * @param previousAppliedProtocols * @return the next batch of appliedProtocolId * @throws SQLException * @throws ObjectStoreException */ private List<Integer> buildADagLevel(List<Integer> previousAppliedProtocols) throws SQLException, ObjectStoreException { List<Integer> nextIterationProtocols = new ArrayList<Integer>(); Iterator<Integer> pap = previousAppliedProtocols.iterator(); while (pap.hasNext()) { List<Integer> outputs = new ArrayList<Integer>(); Integer currentId = pap.next(); outputs.addAll(appliedProtocolMap.get(currentId).outputData); Integer experimentId = appliedProtocolMap.get(currentId).experimentId; //Integer levelDag = appliedProtocolMap.get(currentId).levelDag++; Iterator<Integer> od = outputs.iterator(); while (od.hasNext()) { Integer currentOD = od.next(); List<Integer> nextProtocols = new ArrayList<Integer>(); // build map experiment-data addToMap (experimentDataMap, experimentId, currentOD); if (appliedDataMap.containsKey(currentOD)) { // fill the list of next (children) protocols nextProtocols.addAll(appliedDataMap.get(currentOD).nextAppliedProtocols); if (appliedDataMap.get(currentOD).nextAppliedProtocols.isEmpty()) { // this is a leaf!! // we store it in a map that links it directly to the experiment addToMap(outDataMap, experimentId, currentOD); } } // build the list of children applied protocols chado identifiers // as input for the next iteration Iterator<Integer> nap = nextProtocols.iterator(); while (nap.hasNext()) { Integer currentAPId = nap.next(); // and fill the map with the chado experimentId appliedProtocolMap.get(currentAPId).experimentId = experimentId; //appliedProtocolMap.get(currentAPId).levelDag = (levelDag); nextIterationProtocols.add(currentAPId); // and set the reference from applied protocol to the submission Reference reference = new Reference(); reference.setName("experimentSubmission"); reference.setRefId(experimentMap.get(experimentId).itemIdentifier); getChadoDBConverter().store(reference, appliedProtocolIdMap.get(currentAPId)); LOG.info("DB REFEX: " + experimentId + "|" + currentAPId + "|" + appliedProtocolIdMap.get(currentAPId)); } } } return nextIterationProtocols; } private void processProviderTable(Connection connection) throws SQLException, ObjectStoreException { ResultSet res = getProviderResultSet(connection); int count = 0; while (res.next()) { Integer experimentId = new Integer(res.getInt("experiment_id")); String value = res.getString("value"); Item provider = getChadoDBConverter().createItem("ModEncodeProvider"); provider.setAttribute("name", value); Integer intermineObjectId = getChadoDBConverter().store(provider); storeInProviderMaps(provider, experimentId, intermineObjectId); //providerIdMap .put(experimentId, intermineObjectId); //providerIdRefMap .put(experimentId, provider.getIdentifier()); count++; } LOG.info("created " + count + " providers"); res.close(); } /** * Return the rows needed from the provider table. * We use the surname of the Principal Investigator (person ranked 0) * as the provider name. 
* This is a protected method so that it can be overridden for testing * * @param connection the db connection * @return the SQL result set * @throws SQLException if a database problem occurs */ protected ResultSet getProviderResultSet(Connection connection) throws SQLException { String query = "SELECT a.experiment_id, a.value||' '||b.value as value" + " FROM experiment_prop a, experiment_prop b" + " where a.experiment_id = b.experiment_id" + " and b.name = 'Person Last Name'" + " and a.name = 'Person First Name'" + " and a.rank = 0" + " and b.rank = 0"; LOG.info("executing: " + query); Statement stmt = connection.createStatement(); ResultSet res = stmt.executeQuery(query); return res; } /** * to store provider attributes * only affiliation for now!! * * @param connection * @throws SQLException * @throws ObjectStoreException */ private void processProviderAttributes(Connection connection) throws SQLException, ObjectStoreException { ResultSet res = getProviderAttributesResultSet(connection); int count = 0; while (res.next()) { Integer experimentId = new Integer(res.getInt("experiment_id")); String heading = res.getString("name"); String value = res.getString("value"); String fieldName = PROVIDER_FIELD_NAME_MAP.get(heading); if (fieldName == null) { LOG.error("NOT FOUND in PROVIDER_FIELD_NAME_MAP: " + heading); continue; } else if (fieldName == NOT_TO_BE_LOADED) { continue; } setAttribute(providerIdMap.get(experimentId), fieldName, value); count++; } LOG.info("created " + count + " provider properties"); res.close(); } /** * Return the rows needed for provider from the provider_prop table. * This is a protected method so that it can be overridden for testing * * @param connection the db connection * @return the SQL result set * @throws SQLException if a database problem occurs */ protected ResultSet getProviderAttributesResultSet(Connection connection) throws SQLException { String query = "SELECT experiment_id, name, value" + " FROM experiment_prop" + " where rank=0 "; LOG.info("executing: " + query); Statement stmt = connection.createStatement(); ResultSet res = stmt.executeQuery(query); return res; } private void processExperimentTable(Connection connection) throws SQLException, ObjectStoreException { ResultSet res = getExperimentResultSet(connection); int count = 0; while (res.next()) { Integer experimentId = new Integer(res.getInt("experiment_id")); // String name = res.getString("name"); Item experiment = getChadoDBConverter().createItem("ExperimentSubmission"); // experiment.setAttribute("name", name); // setting reference from experiment to provider.. 
if (!debugMap.get(providerIdRefMap.get(experimentId)) .equals(PREFIX + "ModEncodeProvider")) { throw new IllegalArgumentException( "Type mismatch!!: expecting ModEncodeProvider, getting " + debugMap.get(providerIdRefMap.get(experimentId)).substring(37) + " with experimentId = " + experimentId); } String providerItemIdentifier = providerIdRefMap.get(experimentId); experiment.setReference("provider", providerItemIdentifier); // ..store all Integer intermineObjectId = getChadoDBConverter().store(experiment); // ..and fill the ExperimentSubmissionDetails object ExperimentSubmissionDetails details = new ExperimentSubmissionDetails(); details.interMineObjectId = intermineObjectId; details.itemIdentifier = experiment.getIdentifier(); details.providerItemIdentifier = providerItemIdentifier; experimentMap.put(experimentId, details); debugMap .put(details.itemIdentifier, experiment.getClassName()); count++; } LOG.info("created " + count + " experiments"); res.close(); } /** * Return the rows needed from the experiment table. * NB: for the moment not using the uniquename, but the name from the * experiment_prop table * This is a protected method so that it can be overridden for testing * * @param connection the db connection * @return the SQL result set * @throws SQLException if a database problem occurs */ protected ResultSet getExperimentResultSet(Connection connection) throws SQLException { String query = "SELECT experiment_id, uniquename" + " FROM experiment"; LOG.info("executing: " + query); Statement stmt = connection.createStatement(); ResultSet res = stmt.executeQuery(query); return res; } /** * to store experiment attributes * * @param connection * @throws SQLException * @throws ObjectStoreException */ private void processExperimentProps(Connection connection) throws SQLException, ObjectStoreException { ResultSet res = getExperimentPropResultSet(connection); int count = 0; while (res.next()) { Integer experimentId = new Integer(res.getInt("experiment_id")); String heading = res.getString("name"); String value = res.getString("value"); String fieldName = FIELD_NAME_MAP.get(heading); if (fieldName == null) { LOG.error("NOT FOUND in FIELD_NAME_MAP: " + heading + " [experiment]"); continue; } else if (fieldName == NOT_TO_BE_LOADED) { continue; } setAttribute(experimentMap.get(experimentId).interMineObjectId, fieldName, value); count++; } LOG.info("created " + count + " experiment properties"); res.close(); } /** * Return the rows needed for experiment from the experiment_prop table. 
* This is a protected method so that it can be overridden for testing * * @param connection the db connection * @return the SQL result set * @throws SQLException if a database problem occurs */ protected ResultSet getExperimentPropResultSet(Connection connection) throws SQLException { String query = "SELECT ep.experiment_id, ep.name, ep.value " + "from experiment_prop ep "; LOG.info("executing: " + query); Statement stmt = connection.createStatement(); ResultSet res = stmt.executeQuery(query); return res; } private void processProtocolTable(Connection connection) throws SQLException, ObjectStoreException { ResultSet res = getProtocolResultSet(connection); int count = 0; while (res.next()) { Integer protocolId = new Integer(res.getInt("protocol_id")); String name = res.getString("name"); String description = res.getString("description"); Item protocol = getChadoDBConverter().createItem("Protocol"); protocol.setAttribute("name", name); protocol.setAttribute("description", description); Integer intermineObjectId = getChadoDBConverter().store(protocol); storeInProtocolMaps (protocol, protocolId, intermineObjectId); //protocolIdMap .put(protocolId, intermineObjectId); //protocolIdRefMap .put(protocolId, protocol.getIdentifier()); count++; } LOG.info("created " + count + " protocols"); res.close(); } /** * Return the rows needed from the protocol table. * This is a protected method so that it can be overridden for testing * * @param connection the db connection * @return the SQL result set * @throws SQLException if a database problem occurs */ protected ResultSet getProtocolResultSet(Connection connection) throws SQLException { String query = "SELECT protocol_id, name, description" + " FROM protocol"; LOG.info("executing: " + query); Statement stmt = connection.createStatement(); ResultSet res = stmt.executeQuery(query); return res; } /** * to store protocol attributes * * @param connection * @throws SQLException * @throws ObjectStoreException */ private void processProtocolAttributes(Connection connection) throws SQLException, ObjectStoreException { ResultSet res = getProtocolAttributesResultSet(connection); int count = 0; while (res.next()) { Integer protocolId = new Integer(res.getInt("protocol_id")); String heading = res.getString("heading"); String value = res.getString("value"); String fieldName = FIELD_NAME_MAP.get(heading); if (fieldName == null) { LOG.error("NOT FOUND in FIELD_NAME_MAP: " + heading + " [protocol]"); continue; } else if (fieldName == NOT_TO_BE_LOADED) { continue; } setAttribute(protocolIdMap.get(protocolId), fieldName, value); count++; } LOG.info("created " + count + " protocol attributes"); res.close(); } /** * Return the rows needed for protocols from the attribute table. 
* This is a protected method so that it can be overridden for testing * * @param connection the db connection * @return the SQL result set * @throws SQLException if a database problem occurs */ protected ResultSet getProtocolAttributesResultSet(Connection connection) throws SQLException { String query = "SELECT p.protocol_id, a.heading, a.value " + "from protocol p, attribute a, protocol_attribute pa " + "where pa.attribute_id = a.attribute_id " + "and pa.protocol_id = p.protocol_id "; LOG.info("executing: " + query); Statement stmt = connection.createStatement(); ResultSet res = stmt.executeQuery(query); return res; } private void processAppliedProtocolTable(Connection connection) throws SQLException, ObjectStoreException { ResultSet res = getAppliedProtocolResultSet(connection); int count = 0; while (res.next()) { Integer appliedProtocolId = new Integer(res.getInt("applied_protocol_id")); Integer protocolId = new Integer(res.getInt("protocol_id")); Integer experimentId = new Integer(res.getInt("experiment_id")); Item appliedProtocol = getChadoDBConverter().createItem("AppliedProtocol"); // for DEBUG, to rm if (!debugMap.get(protocolIdRefMap.get(protocolId)). equalsIgnoreCase(PREFIX + "Protocol")) { throw new IllegalArgumentException( "Type mismatch!!: expecting Protocol, getting " + debugMap.get(protocolIdRefMap.get(protocolId)).substring(37) + " with protocolId = " + protocolId + ", appliedProtocolId = " + appliedProtocolId); } // setting references to protocols appliedProtocol.setReference("protocol", protocolIdRefMap.get(protocolId)); if (experimentId > 0) { // for DEBUG, to rm if (!debugMap.get(experimentMap.get(experimentId).itemIdentifier). equals(PREFIX + "ExperimentSubmission")) { throw new IllegalArgumentException( "Type mismatch!!: expecting ExperimentSubmission, getting " + debugMap.get(experimentMap.get(experimentId) .itemIdentifier).substring(37) + " with experimentId = " + experimentId + ", appliedProtocolId = " + appliedProtocolId); } // setting reference to experimentSubmission // probably to rm (we do it later anyway). TODO: check appliedProtocol.setReference("experimentSubmission", experimentMap.get(experimentId).itemIdentifier); } // store it and add to maps Integer intermineObjectId = getChadoDBConverter().store(appliedProtocol); appliedProtocolIdMap .put(appliedProtocolId, intermineObjectId); appliedProtocolIdRefMap .put(appliedProtocolId, appliedProtocol.getIdentifier()); count++; } LOG.info("created " + count + " appliedProtocol"); res.close(); } /** * Return the rows needed from the appliedProtocol table. 
* This is a protected method so that it can be overridden for testing * * @param connection the db connection * @return the SQL result set * @throws SQLException if a database problem occurs */ protected ResultSet getAppliedProtocolResultSet(Connection connection) throws SQLException { String query = "SELECT eap.experiment_id ,ap.applied_protocol_id, ap.protocol_id" + " FROM applied_protocol ap" + " LEFT JOIN experiment_applied_protocol eap" + " ON (eap.first_applied_protocol_id = ap.applied_protocol_id )"; /* "SELECT ap.applied_protocol_id, ap.protocol_id, apd.data_id, apd.direction" + " FROM applied_protocol ap, applied_protocol_data apd" + " WHERE apd.applied_protocol_id = ap.applied_protocol_id"; */ LOG.info("executing: " + query); Statement stmt = connection.createStatement(); ResultSet res = stmt.executeQuery(query); return res; } /** * to store appliedProtocol attributes * * TODO: check what if you have different 'unit' for different parameters * of the applied protocol * * @param connection * @throws SQLException * @throws ObjectStoreException */ private void processAppliedProtocolData(Connection connection) throws SQLException, ObjectStoreException { ResultSet res = getAppliedProtocolDataResultSet(connection); int count = 0; while (res.next()) { Integer appliedProtocolDataId = new Integer(res.getInt("applied_protocol_data_id")); Integer appliedProtocolId = new Integer(res.getInt("applied_protocol_id")); Integer dataId = new Integer(res.getInt("data_id")); String name = res.getString("name"); String value = res.getString("value"); String direction = res.getString("direction"); String heading = res.getString("heading"); Item submissionData = getChadoDBConverter().createItem("SubmissionData"); if (name != null && !name.equals("")) { submissionData.setAttribute("name", name); } submissionData.setAttribute("value", value); submissionData.setAttribute("direction", direction); submissionData.setAttribute("type", heading); // store it and add to object and maps Integer intermineObjectId = getChadoDBConverter().store(submissionData); AppliedData aData = new AppliedData(); aData.dataId = dataId; //++check if needed aData.intermineObjectId = intermineObjectId; aData.itemIdentifier = submissionData.getIdentifier(); appliedDataMap.put(appliedProtocolDataId, aData); count++; } LOG.info("created " + count + " SubmissionData"); res.close(); } /** * Return the rows needed for data from the applied_protocol_data table. 
* * @param connection the db connection * @return the SQL result set * @throws SQLException if a database problem occurs */ protected ResultSet getAppliedProtocolDataResultSet(Connection connection) throws SQLException { String query = "SELECT apd.applied_protocol_id, apd.applied_protocol_data_id, apd.data_id," + " apd.direction, d.heading, d.name, d.value" + " FROM applied_protocol_data apd, data d" + " WHERE apd.data_id = d.data_id"; LOG.info("executing: " + query); Statement stmt = connection.createStatement(); ResultSet res = stmt.executeQuery(query); return res; } private void processDataFeatureTable(Connection connection, Map<Integer, FeatureData> featureMap) throws SQLException, ObjectStoreException { ResultSet res = getDataFeatureResultSet(connection); while (res.next()) { Integer appliedProtocolDataId = new Integer(res.getInt("applied_protocol_data_id")); Integer dataId = new Integer(res.getInt("data_id")); Integer featureId = new Integer(res.getInt("feature_id")); String featureItemId = featureMap.get(featureId).getItemIdentifier(); FeatureData fd = featureMap.get(featureId); LOG.error(fd.getInterMineType() + ": " + fd.getChadoFeatureName() + ", " + fd.getChadoFeatureUniqueName()); Reference featureRef = new Reference("feature", featureItemId); getChadoDBConverter().store(featureRef, appliedDataMap.get(appliedProtocolDataId).intermineObjectId); } } /** * Read from data_feature and related tables * @param connection chado db connection * @return results from querying data_feature table * @throws SQLException */ private ResultSet getDataFeatureResultSet(Connection connection) throws SQLException { String query = "SELECT apd.applied_protocol_data_id, apd.data_id, df.feature_id" + " FROM applied_protocol_data apd, data d, data_feature df" + " WHERE apd.data_id = d.data_id" + " AND df.data_id = d.data_id" + " AND d.heading != 'Result File'"; LOG.info("executing: " + query); Statement stmt = connection.createStatement(); ResultSet res = stmt.executeQuery(query); return res; } /** * to store applied protocols attributes * * @param connection * @throws SQLException * @throws ObjectStoreException */ private void processAppliedProtocolDataAttributes(Connection connection) throws SQLException, ObjectStoreException { ResultSet res = getAppliedProtocolDataAttributesResultSet(connection); int count = 0; while (res.next()) { Integer appliedProtocolId = new Integer(res.getInt("applied_protocol_id")); String heading = res.getString("heading"); String value = res.getString("value"); String fieldName = FIELD_NAME_MAP.get(heading); if (fieldName == null) { LOG.error("NOT FOUND in FIELD_NAME_MAP: " + heading + " [appliedProtocol]"); continue; } setAttribute(appliedProtocolIdMap.get(appliedProtocolId), fieldName, value); count++; } LOG.info("created " + count + " data attributes"); res.close(); } /** * Query to get the attributes for data linked to applied protocols * (see previous get method). * This is a protected method so that it can be overridden for testing. 
* * @param connection the db connection * @return the SQL result set * @throws SQLException if a database problem occurs */ protected ResultSet getAppliedProtocolDataAttributesResultSet(Connection connection) throws SQLException { String query = "select apd.applied_protocol_id, da.data_id, a.heading, a.value" + " from applied_protocol_data apd, data_attribute da, attribute a" + " where" + " apd.data_id = da.data_id" + " and da.attribute_id = a.attribute_id"; LOG.info("executing: " + query); Statement stmt = connection.createStatement(); ResultSet res = stmt.executeQuery(query); return res; } /** * to store references between experiment and submissionData * (1 to many) */ private void setExperimentRefs(Connection connection) throws ObjectStoreException { Iterator<Integer> exp = experimentDataMap.keySet().iterator(); while (exp.hasNext()) { Integer thisExperimentId = exp.next(); List<Integer> dataIds = experimentDataMap.get(thisExperimentId); Iterator<Integer> dat = dataIds.iterator(); while (dat.hasNext()) { Integer currentId = dat.next(); Reference reference = new Reference(); reference.setName("experimentSubmission"); reference.setRefId(experimentMap.get(thisExperimentId).itemIdentifier); getChadoDBConverter().store(reference, appliedDataMap.get(currentId).intermineObjectId); } } } /** * to store references between experiment and its initial submissionData * (initial input of the experiment) * (1 to many) */ private void setExperimentInputRefs(Connection connection) throws ObjectStoreException { LOG.info("REF: IN"); Iterator<Integer> exp = inDataMap.keySet().iterator(); while (exp.hasNext()) { Integer thisExperimentId = exp.next(); List<Integer> dataIds = inDataMap.get(thisExperimentId); Iterator<Integer> dat = dataIds.iterator(); ReferenceList collection = new ReferenceList(); collection.setName("inData"); while (dat.hasNext()) { Integer currentId = dat.next(); if (appliedDataMap.get(currentId) == null) { LOG.info("REF: XXX" + currentId); continue; } LOG.info("REF: " + currentId); LOG.info("REF: " + experimentMap.get(thisExperimentId).itemIdentifier); LOG.info("REF: " + appliedDataMap.get(currentId).intermineObjectId); LOG.info("REF: " + appliedDataMap.get(currentId).itemIdentifier); collection.addRefId(appliedDataMap.get(currentId).itemIdentifier); } getChadoDBConverter().store(collection, experimentMap.get(thisExperimentId).interMineObjectId); } LOG.info("REF: OUT"); } /** * to store references between experiment and its resulting submissionData * (final output of the experiment) * (1 to many) */ private void setExperimentResultsRefs(Connection connection) throws ObjectStoreException { Iterator<Integer> exp = outDataMap.keySet().iterator(); while (exp.hasNext()) { Integer thisExperimentId = exp.next(); List<Integer> dataIds = outDataMap.get(thisExperimentId); Iterator<Integer> dat = dataIds.iterator(); ReferenceList collection = new ReferenceList(); collection.setName("outData"); while (dat.hasNext()) { Integer currentId = dat.next(); if (appliedDataMap.get(currentId) == null) { continue; } collection.addRefId(appliedDataMap.get(currentId).itemIdentifier); } getChadoDBConverter().store(collection, experimentMap.get(thisExperimentId).interMineObjectId); } } //exp -> prot private void setExperimentProtocolsRefs(Connection connection) throws ObjectStoreException { Map<Integer, List<Integer>> expProtocolMap = new HashMap<Integer, List<Integer>>(); Iterator<Integer> apId = appliedProtocolMap.keySet().iterator(); while (apId.hasNext()) { Integer thisAP = apId.next(); AppliedProtocol ap = appliedProtocolMap.get(thisAP);
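// collect, per experiment, the protocol ids of its applied protocols; addToMap (a helper assumed to be defined elsewhere in this class) appends the value to the list stored under the key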
addToMap(expProtocolMap, ap.experimentId, ap.protocolId); } Iterator<Integer> exp = expProtocolMap.keySet().iterator(); while (exp.hasNext()) { Integer thisExperimentId = exp.next(); List<Integer> protocolIds = expProtocolMap.get(thisExperimentId); Iterator<Integer> dat = protocolIds.iterator(); ReferenceList collection = new ReferenceList(); collection.setName("protocols"); while (dat.hasNext()) { Integer currentId = dat.next(); collection.addRefId(protocolIdRefMap.get(currentId)); } getChadoDBConverter().store(collection, experimentMap.get(thisExperimentId).interMineObjectId); } } /** * to store references between applied protocols and their input data * reverse reference: data -> next appliedProtocols * and between applied protocols and their output data * reverse reference: data -> previous appliedProtocols * (many to many) */ private void setDAGRefs(Connection connection) throws ObjectStoreException { Iterator<Integer> apId = appliedProtocolMap.keySet().iterator(); while (apId.hasNext()) { Integer thisAP = apId.next(); LOG.info("REF: LAST: " + thisAP); AppliedProtocol ap = appliedProtocolMap.get(thisAP); List<Integer> dataIds = ap.inputData; LOG.info("REF: LAST: inData" + dataIds); if (!dataIds.isEmpty()) { Iterator<Integer> i = dataIds.iterator(); ReferenceList collection = new ReferenceList(); collection.setName("inputData"); while (i.hasNext()) { Integer n = i.next(); collection.addRefId(appliedDataMap.get(n).itemIdentifier); LOG.info("REF: LAST: it--" + appliedDataMap.get(n).itemIdentifier); } LOG.info("REF: LAST: ob--" + ap.intermineObjectId); LOG.info("REF: LAST: ob--" + appliedProtocolIdMap.get(thisAP)); getChadoDBConverter().store(collection, appliedProtocolIdMap.get(thisAP)); } LOG.info("REF: LAST: outData" + ap.outputData); List<Integer> outIds = ap.outputData; if (!outIds.isEmpty()) { Iterator<Integer> i = outIds.iterator(); ReferenceList collection = new ReferenceList(); collection.setName("outputData"); while (i.hasNext()) { Integer n = i.next(); collection.addRefId(appliedDataMap.get(n).itemIdentifier); LOG.info("REF: LAST: it--" + appliedDataMap.get(n).itemIdentifier); } LOG.info("REF: LAST: ob--" + appliedProtocolIdMap.get(thisAP)); getChadoDBConverter().store(collection, appliedProtocolIdMap.get(thisAP)); } LOG.info("REF: LAST: ==================="); } } /** * maps from chado field names to ours. * if a field is not needed it is marked with NOT_TO_BE_LOADED * a check is performed and fields unaccounted for are logged. * * a specific provider field map is needed because we are using the same * chado table of the experiment to get the data. * used only for affiliation(!)
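* (e.g. FIELD_NAME_MAP sends the chado heading "Investigation Title" to the item field "title")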
*/ private static final Map<String, String> FIELD_NAME_MAP = new HashMap<String, String>(); private static final Map<String, String> PROVIDER_FIELD_NAME_MAP = new HashMap<String, String>(); private static final String NOT_TO_BE_LOADED = "this is ; illegal - anyway"; static { // experiment FIELD_NAME_MAP.put("Investigation Title", "title"); FIELD_NAME_MAP.put("Experiment Description", "description"); FIELD_NAME_MAP.put("Experimental Design", "design"); FIELD_NAME_MAP.put("Experimental Factor Type", "factorType"); FIELD_NAME_MAP.put("Experimental Factor Name", "factorName"); FIELD_NAME_MAP.put("Quality Control Type", "qualityControl"); FIELD_NAME_MAP.put("Replicate Type", "replicate"); FIELD_NAME_MAP.put("Date of Experiment", "experimentDate"); FIELD_NAME_MAP.put("Public Release Date", "publicReleaseDate"); // FIELD_NAME_MAP.put("species", "organism"); // FIELD_NAME_MAP.put("PubMed ID", "publication"); FIELD_NAME_MAP.put("Person Last Name", NOT_TO_BE_LOADED); FIELD_NAME_MAP.put("Person Affiliation", NOT_TO_BE_LOADED); FIELD_NAME_MAP.put("Person First Name", NOT_TO_BE_LOADED); FIELD_NAME_MAP.put("Person Address", NOT_TO_BE_LOADED); FIELD_NAME_MAP.put("Person Phone", NOT_TO_BE_LOADED); FIELD_NAME_MAP.put("Person Email", NOT_TO_BE_LOADED); FIELD_NAME_MAP.put("Person Roles", NOT_TO_BE_LOADED); FIELD_NAME_MAP.put("lab", NOT_TO_BE_LOADED); // data: parameter values FIELD_NAME_MAP.put("genome version", "genomeVersion"); FIELD_NAME_MAP.put("median value", "medianValue"); // data: result values FIELD_NAME_MAP.put("transcript ID", "transcriptId"); //FIELD_NAME_MAP.put("inner primer", "innerPrimer"); FIELD_NAME_MAP.put("outer primer", "outerPrimer"); FIELD_NAME_MAP.put("TraceArchive ID", "traceArchiveId"); FIELD_NAME_MAP.put("genbank ID", "genBankId"); FIELD_NAME_MAP.put("EST acc", "estAcc"); // data: source attributes FIELD_NAME_MAP.put("Source Name", "source"); FIELD_NAME_MAP.put("RNA ID", "RNAId"); FIELD_NAME_MAP.put("Cell Type", "cellType"); FIELD_NAME_MAP.put("Biosample #", "biosampleNr"); // data: parameter value attributes FIELD_NAME_MAP.put("Unit", "unit"); FIELD_NAME_MAP.put("Characteristics", "characteristics"); // data: the real thing? FIELD_NAME_MAP.put("Hybridization Name", "hybridizationName"); FIELD_NAME_MAP.put("Array Data File", "arrayDataFile"); FIELD_NAME_MAP.put("Array Design REF", "arrayDesignRef"); FIELD_NAME_MAP.put("Derived Array Data File", "derivedArrayDataFile"); FIELD_NAME_MAP.put("Result File", "resultFile"); // data: obsolete? 
// FIELD_NAME_MAP.put("", "arrayMatrixDateFile"); // FIELD_NAME_MAP.put("", "label"); // FIELD_NAME_MAP.put("", "source"); // FIELD_NAME_MAP.put("", "sample"); // FIELD_NAME_MAP.put("", "extract"); // FIELD_NAME_MAP.put("", "labelExtract"); // protocol FIELD_NAME_MAP.put("Protocol Type", "type"); FIELD_NAME_MAP.put("url protocol", "url"); FIELD_NAME_MAP.put("species", NOT_TO_BE_LOADED); FIELD_NAME_MAP.put("references", NOT_TO_BE_LOADED); } static { PROVIDER_FIELD_NAME_MAP.put("Person Affiliation", "affiliation"); PROVIDER_FIELD_NAME_MAP.put("Person Last Name", NOT_TO_BE_LOADED); PROVIDER_FIELD_NAME_MAP.put("Experiment Description", NOT_TO_BE_LOADED); PROVIDER_FIELD_NAME_MAP.put("Investigation Title", NOT_TO_BE_LOADED); PROVIDER_FIELD_NAME_MAP.put("Experimental Design", NOT_TO_BE_LOADED); PROVIDER_FIELD_NAME_MAP.put("Experimental Factor Name", NOT_TO_BE_LOADED); PROVIDER_FIELD_NAME_MAP.put("Experimental Factor Type", NOT_TO_BE_LOADED); PROVIDER_FIELD_NAME_MAP.put("Person First Name", NOT_TO_BE_LOADED); PROVIDER_FIELD_NAME_MAP.put("Person Address", NOT_TO_BE_LOADED); PROVIDER_FIELD_NAME_MAP.put("Person Phone", NOT_TO_BE_LOADED); PROVIDER_FIELD_NAME_MAP.put("Person Email", NOT_TO_BE_LOADED); PROVIDER_FIELD_NAME_MAP.put("Person Roles", NOT_TO_BE_LOADED); PROVIDER_FIELD_NAME_MAP.put("Quality Control Type", NOT_TO_BE_LOADED); PROVIDER_FIELD_NAME_MAP.put("Replicate Type", NOT_TO_BE_LOADED); PROVIDER_FIELD_NAME_MAP.put("PubMed ID", NOT_TO_BE_LOADED); PROVIDER_FIELD_NAME_MAP.put("Date of Experiment", NOT_TO_BE_LOADED); PROVIDER_FIELD_NAME_MAP.put("Public Release Date", NOT_TO_BE_LOADED); } /** * to store identifiers in protocol maps. * simply store the proper values in the maps. * A check on the type is performed. Possibly can be avoided after more testing, * and the old commented lines can be reinstated (note that we need 3 methods, one * for each category of data. * * @param i * @param chadoId * @param intermineObjectId * @throws ObjectStoreException */ private void storeInProtocolMaps(Item i, Integer chadoId, Integer intermineObjectId) throws ObjectStoreException { if (i.getClassName().equals("http://www.flymine.org/model/genomic#Protocol")) { protocolIdMap .put(chadoId, intermineObjectId); protocolIdRefMap .put(chadoId, i.getIdentifier()); } else { throw new IllegalArgumentException("Type mismatch: expecting Protocol, getting " + i.getClassName().substring(37) + " with intermineObjectId = " + intermineObjectId + ", chadoId = " + chadoId); } debugMap .put(i.getIdentifier(), i.getClassName()); } /** * to store identifiers in provider maps. 
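* The item class is checked to be ModEncodeProvider before the chado id and the item identifier are recorded.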
* @param i * @param chadoId * @param intermineObjectId * @throws ObjectStoreException */ private void storeInProviderMaps(Item i, Integer chadoId, Integer intermineObjectId) throws ObjectStoreException { if (i.getClassName().equals("http://www.flymine.org/model/genomic#ModEncodeProvider")) { providerIdMap .put(chadoId, intermineObjectId); providerIdRefMap .put(chadoId, i.getIdentifier()); } else { throw new IllegalArgumentException( "Type mismatch: expecting ModEncodeProvider, getting " + i.getClassName().substring(37) + " with intermineObjectId = " + intermineObjectId + ", chadoId = " + chadoId); } debugMap .put(i.getIdentifier(), i.getClassName()); } // utilities for debugging // to be removed private void printListMap (Map<Integer, List<Integer>> m) { Iterator i = m.keySet().iterator(); while (i.hasNext()) { Integer current = (Integer) i.next(); List ids = m.get(current); Iterator i2 = ids.iterator(); while (i2.hasNext()) { LOG.info("MAP: " + current + "|" + i2.next()); } LOG.info("MAP: ...."); } } private void printMap (Map<Integer, Integer> m) { Iterator<Integer> i = m.keySet().iterator(); while (i.hasNext()) { Integer thisId = i.next(); LOG.info("MAP: " + thisId + "|" + m.get(thisId)); } LOG.info("MAP: ...."); } private void printMapAP (Map<Integer, AppliedProtocol> m) { Iterator<Integer> i = m.keySet().iterator(); while (i.hasNext()) { Integer a = i.next(); AppliedProtocol ap = m.get(a); List<Integer> ids = ap.outputData; Iterator<Integer> i2 = ids.iterator(); while (i2.hasNext()) { LOG.info("DB APMAP " + a + ": " + i2.next()); } } } private void printMapDATA (Map<Integer, AppliedData> m) { Iterator<Integer> i = m.keySet().iterator(); while (i.hasNext()) { Integer a = i.next(); AppliedData ap = m.get(a); List<Integer> ids = ap.nextAppliedProtocols; Iterator i2 = ids.iterator(); while (i2.hasNext()) { LOG.info("DB DATAMAP " + a + ": " + i2.next()); } } } }
package org.intermine.bio.dataconversion; import java.io.File; import java.io.FileWriter; import java.io.IOException; import java.sql.Connection; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.apache.commons.lang.StringUtils; import org.apache.log4j.Logger; import org.intermine.bio.util.OrganismRepository; import org.intermine.objectstore.ObjectStoreException; import org.intermine.util.StringUtil; import org.intermine.xml.full.Attribute; import org.intermine.xml.full.Item; import org.intermine.xml.full.Reference; import org.intermine.xml.full.ReferenceList; /** * Create items from the modENCODE metadata extensions to the chado schema. * @author Kim Rutherford,sc,rns */ public class ModEncodeMetaDataProcessor extends ChadoProcessor { private static final Logger LOG = Logger.getLogger(ModEncodeMetaDataProcessor.class); private static final String WIKI_URL = "http://wiki.modencode.org/project/index.php?title="; private static final String FILE_URL = "http://submit.modencode.org/submit/public/get_file/"; private static final Set<String> DB_RECORD_TYPES = Collections.unmodifiableSet(new HashSet<String>(Arrays.asList( "GEO_record", "ArrayExpress_record", "TraceArchive_record", "dbEST_record", "ShortReadArchive_project_ID (SRA)", "ShortReadArchive_project_ID_list (SRA)"))); // submission maps private Map<Integer, String> submissionOrganismMap = new HashMap<Integer, String>(); // maps from chado identifier to lab/project details private Map<Integer, SubmissionDetails> submissionMap = new HashMap<Integer, SubmissionDetails>(); // chado submission id to list of top level attributes, e.g. dev stage, organism_part private Map<Integer, String> dccIdMap = new HashMap<Integer, String>(); private Map<Integer, ExperimentalFactor> submissionEFMap = new HashMap<Integer, ExperimentalFactor>(); // applied_protocol/data/attribute maps // chado submission id to chado data_id private Map<Integer, List<Integer>> submissionDataMap = new HashMap<Integer, List<Integer>>(); // chado data id to chado submission id private Map<Integer, Integer> dataSubmissionMap = new HashMap<Integer, Integer>(); // used when traversing dag of applied protocols private Map<Integer, AppliedProtocol> appliedProtocolMap = new HashMap<Integer, AppliedProtocol>(); // used when traversing dag of applied protocols private Map<Integer, AppliedData> appliedDataMap = new HashMap<Integer, AppliedData>(); // project/lab/experiment/submission maps // for projects, the maps link the project name with the identifiers... private Map<String, Integer> projectIdMap = new HashMap<String, Integer>(); private Map<String, String> projectIdRefMap = new HashMap<String, String>(); // for labs, the maps link the lab name with the identifiers... private Map<String, Integer> labIdMap = new HashMap<String, Integer>(); private Map<String, String> labIdRefMap = new HashMap<String, String>(); // for experiment, the maps link the exp name (description!) with the identifiers... 
private Map<String, Integer> experimentIdMap = new HashMap<String, Integer>(); private Map<String, String> experimentIdRefMap = new HashMap<String, String>(); private Map<String, List<Integer>> expSubMap = new HashMap<String, List<Integer>>(); // ...we need a further map to link to submission private Map<Integer, String> submissionProjectMap = new HashMap<Integer, String>(); private Map<Integer, String> submissionLabMap = new HashMap<Integer, String>(); // submission/applied_protocol/protocol maps private Map<String, String> protocolsMap = new HashMap<String, String>(); private Map<Integer, String> protocolItemIds = new HashMap<Integer, String>(); private Map<String, Integer> protocolItemToObjectId = new HashMap<String, Integer>(); // submission chado id to item identifier of Protocol used to generate GFF private Map<Integer, String> scoreProtocols = new HashMap<Integer, String>(); private Map<Integer, Integer> publicationIdMap = new HashMap<Integer, Integer>(); private Map<Integer, String> publicationIdRefMap = new HashMap<Integer, String>(); private Map<Integer, String> protocolTypesMap = new HashMap<Integer, String>(); private Map<Integer, Integer> appliedProtocolIdMap = new HashMap<Integer, Integer>(); private Map<Integer, String> appliedProtocolIdRefMap = new HashMap<Integer, String>(); // list of firstAppliedProtocols, first level of the DAG linking // the applied protocols through the data (and giving the flow of data) private List<Integer> firstAppliedProtocols = new ArrayList<Integer>(); // experimental factor maps private Map<String, Integer> eFactorIdMap = new HashMap<String, Integer>(); private Map<String, String> eFactorIdRefMap = new HashMap<String, String>(); private Map<Integer, List<String>> submissionEFactorMap = new HashMap<Integer, List<String>>(); // caches // cache cv term names by id private Map<String, String> cvtermCache = new HashMap<String, String>(); private Map<String, String> devStageTerms = new HashMap<String, String>(); private Map<String, String> devOntologies = new HashMap<String, String>(); // just for debugging private Map<String, String> debugMap = new HashMap<String, String>(); // itemIdentifier, type private Map<String, Item> nonWikiSubmissionProperties = new HashMap<String, Item>(); private Map<String, Item> subItemsMap = new HashMap<String, Item>(); Map<Integer, SubmissionReference> submissionRefs = null; private IdResolverFactory flyResolverFactory = null; private IdResolverFactory wormResolverFactory = null; private Map<String, String> geneToItemIdentifier = new HashMap<String, String>(); private Map<DatabaseRecordKey, String> dbRecords = new HashMap<DatabaseRecordKey, String>(); private static class SubmissionDetails { // the identifier assigned to Item eg. 
"0_23" private String itemIdentifier; // the object id of the stored Item private Integer interMineObjectId; // the identifier assigned to lab Item for this object private String labItemIdentifier; private String title; } /** * AppliedProtocol class to reconstruct the flow of submission data */ private static class AppliedProtocol { private Integer submissionId; // chado private Integer protocolId; private Integer step; // the level in the dag for the AP // the output data associated to this applied protocol private List<Integer> outputs = new ArrayList<Integer>(); private List<Integer> inputs = new ArrayList<Integer>(); } /** * AppliedData class * to reconstruct the flow of submission data * */ private static class AppliedData { private String itemIdentifier; private Integer intermineObjectId; private Integer dataId; private String value; private String actualValue; private String type; private String name; // the list of applied protocols for which this data item is an input private List<Integer> nextAppliedProtocols = new ArrayList<Integer>(); private List<Integer> previousAppliedProtocols = new ArrayList<Integer>(); } /** * Experimental Factor class * to store the couples (type, name/value) of EF * note that in chado sometime the name is given, other times is the value */ private static class ExperimentalFactor { private Map<String, String> efTypes = new HashMap<String, String>(); private List<String> efNames = new ArrayList<String>(); } /** * Create a new ChadoProcessor object * @param chadoDBConverter the converter that created this Processor */ public ModEncodeMetaDataProcessor(ChadoDBConverter chadoDBConverter) { super(chadoDBConverter); } /** * {@inheritDoc} */ @Override public void process(Connection connection) throws Exception { processProjectTable(connection); processLabTable(connection); //processLabAttributes(connection); processSubmissionOrganism(connection); processSubmission(connection); processExperimentProps(connection); processProtocolTable(connection); processProtocolAttributes(connection); processAppliedProtocolTable(connection); processAppliedData(connection); processAppliedDataAttributes(connection); processExperiment(connection); processDag(connection); findScoreProtocols(); processFeatures(connection, submissionMap); // set references setSubmissionRefs(connection); setSubmissionExperimetRefs(connection); setDAGRefs(connection); // create DatabaseRecords where necessary for each submission createDatabaseRecords(connection); // create result files per submission createResultFiles(connection); // for high level attributes and experimental factors (EF) // TODO: clean up processEFactor(connection); flyResolverFactory = new FlyBaseIdResolverFactory("gene"); wormResolverFactory = new WormBaseChadoIdResolverFactory("gene"); processSubmissionProperties(connection); createRelatedSubmissions(connection); setSubmissionProtocolsRefs(connection); setSubmissionEFactorsRefs(connection); setSubmissionPublicationRefs(connection); } private void processFeatures(Connection connection, Map<Integer, SubmissionDetails> submissionMap) throws Exception { long bT = System.currentTimeMillis(); // to monitor time spent in the process // keep map of feature to submissions it has been referenced by, some features appear in // more than one submission Map<Integer, List<String>> subCollections = new HashMap<Integer, List<String>>(); // hold features that should only be processed once across all submissions, initialise // processor with this map each time Map <Integer, FeatureData> 
commonFeaturesMap = new HashMap<Integer, FeatureData>(); for (Map.Entry<Integer, SubmissionDetails> entry: submissionMap.entrySet()) { Map<Integer, FeatureData> subFeatureMap = new HashMap<Integer, FeatureData>(); Integer chadoExperimentId = entry.getKey(); SubmissionDetails submissionDetails = entry.getValue(); String submissionItemIdentifier = submissionDetails.itemIdentifier; String labItemIdentifier = submissionDetails.labItemIdentifier; String submissionTitle = submissionDetails.title; List<Integer> thisSubmissionDataIds = submissionDataMap.get(chadoExperimentId); LOG.debug("DATA IDS " + chadoExperimentId + ": " + thisSubmissionDataIds.size()); ModEncodeFeatureProcessor processor = new ModEncodeFeatureProcessor(getChadoDBConverter(), submissionItemIdentifier, labItemIdentifier, thisSubmissionDataIds, submissionTitle, scoreProtocols.get(chadoExperimentId)); processor.initialiseCommonFeatures(commonFeaturesMap); processor.process(connection); // all features related to this submission subFeatureMap.putAll(processor.getFeatureMap()); // features common across many submissions commonFeaturesMap.putAll(processor.getCommonFeaturesMap()); LOG.info("COMMON FEATURES: " + commonFeaturesMap.size()); if (subFeatureMap.keySet().size() == 0) { LOG.error("FEATMAP: submission " + chadoExperimentId + " has no featureMap keys."); continue; } LOG.info("FEATMAP: submission " + chadoExperimentId + "|" + "featureMap: " + subFeatureMap.keySet().size()); // Populate map of submissions to features, some features are in multiple submissions String queryList = StringUtil.join(thisSubmissionDataIds, ","); processDataFeatureTable(connection, subCollections, subFeatureMap, chadoExperimentId, queryList); // read any genes that have been created so we can re-use the same item identifiers // when creating antibody/strain target genes later extractGenesFromSubFeatureMap(processor, subFeatureMap); } storeSubmissionsCollections(subCollections); LOG.info("PROCESS TIME features: " + (System.currentTimeMillis() - bT)); } private void storeSubmissionsCollections(Map<Integer, List<String>> subCollections) throws ObjectStoreException { for (Map.Entry<Integer, List<String>> entry : subCollections.entrySet()) { Integer featureObjectId = entry.getKey(); ReferenceList collection = new ReferenceList("submissions", entry.getValue()); getChadoDBConverter().store(collection, featureObjectId); } } private void extractGenesFromSubFeatureMap(ModEncodeFeatureProcessor processor, Map<Integer, FeatureData> subFeatureMap) { for (FeatureData fData : subFeatureMap.values()) { if (fData.getInterMineType().equals("Gene")) { String geneIdentifier = processor.fixIdentifier(fData, fData.getUniqueName()); geneToItemIdentifier.put(geneIdentifier, fData.getItemIdentifier()); } } } private void processDataFeatureTable(Connection connection, Map<Integer, List<String>> subCols, Map<Integer, FeatureData> featureMap, Integer chadoExperimentId, String queryList) throws SQLException, ObjectStoreException { long bT = System.currentTimeMillis(); // to monitor time spent in the process ResultSet res = getDataFeature(connection, queryList); String submissionItemId = submissionMap.get(chadoExperimentId).itemIdentifier; while (res.next()) { Integer dataId = new Integer(res.getInt("data_id")); Integer featureId = new Integer(res.getInt("feature_id")); FeatureData featureData = featureMap.get(featureId); if (featureData == null) { LOG.debug("Check feature type: no data for feature_id: " + featureId + " in processDataFeatureTable(), data_id =" + dataId); continue; } 
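// a feature can be referenced by more than one submission: accumulate the submission item identifiers per feature object id, stored later as the feature's "submissions" collection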
Integer featureObjectId = featureData.getIntermineObjectId(); List<String> subs = subCols.get(featureObjectId); if (subs == null) { subs = new ArrayList<String>(); subCols.put(featureObjectId, subs); } subs.add(submissionItemId); } LOG.info("PROCESS TIME data_feature table: " + (System.currentTimeMillis() - bT)); } private ResultSet getDataFeature(Connection connection, String queryList) throws SQLException { String query = "SELECT df.data_id, df.feature_id" + " FROM data_feature df " + " WHERE data_id in (" + queryList + ")"; return doQuery(connection, query, "getDataFeature"); } private void processDag(Connection connection) throws SQLException, ObjectStoreException { long bT = System.currentTimeMillis(); // to monitor time spent in the process ResultSet res = getDAG(connection); AppliedProtocol node = new AppliedProtocol(); AppliedData branch = null; Integer count = 0; Integer actualSubmissionId = 0; // to store the experiment id (see below) Integer previousAppliedProtocolId = 0; while (res.next()) { Integer submissionId = new Integer(res.getInt("experiment_id")); Integer protocolId = new Integer(res.getInt("protocol_id")); Integer appliedProtocolId = new Integer(res.getInt("applied_protocol_id")); Integer dataId = new Integer(res.getInt("data_id")); String direction = res.getString("direction"); // build a data node for each iteration if (appliedDataMap.containsKey(dataId)) { branch = appliedDataMap.get(dataId); } else { branch = new AppliedData(); } // could use > (order by apid, apdataid, direction) // NB: using isLast() is expensive if (!appliedProtocolId.equals(previousAppliedProtocolId) || res.isLast()) { // the submissionId != null for the first applied protocol if (submissionId > 0) { firstAppliedProtocols.add(appliedProtocolId); // set actual submission id // we can either be at a first applied protocol (submissionId > 0).. actualSubmissionId = submissionId; } else { // ..or already down the dag, and we use the stored id. submissionId = actualSubmissionId; } // last one: fill the list of outputs // and add to the general list of data ids for the submission, // used to fetch features if (res.isLast()) { if (direction.equalsIgnoreCase("output")) { node.outputs.add(dataId); mapSubmissionAndData(submissionId, dataId); dataSubmissionMap.put(dataId, submissionId); } } // if it is not the first iteration, let's store it if (previousAppliedProtocolId > 0) { appliedProtocolMap.put(previousAppliedProtocolId, node); } // new node AppliedProtocol newNode = new AppliedProtocol(); newNode.protocolId = protocolId; newNode.submissionId = submissionId; if (direction.startsWith("in")) { // add this applied protocol to the list of nextAppliedProtocols branch.nextAppliedProtocols.add(appliedProtocolId); // ..and update the map if (appliedDataMap.containsKey(dataId)) { appliedDataMap.remove(dataId); } appliedDataMap.put(dataId, branch); // .. and add the dataId to the list of input Data for this applied protocol newNode.inputs.add(dataId); } else if (direction.startsWith("out")) { // add the dataId to the list of output Data for this applied protocol: // it will be used to link to the next set of applied protocols newNode.outputs.add(dataId); if (!previousAppliedProtocolId.equals(0)) { branch.previousAppliedProtocols.add(previousAppliedProtocolId); } } else { // there is some problem with the strings 'input' or 'output' throw new IllegalArgumentException("Data direction not valid for dataId: " + dataId + "|" + direction + "|"); } // for the new round.. 
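// the applied protocol just built becomes the current node for the following rows of the result set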
node = newNode; previousAppliedProtocolId = appliedProtocolId; } else { // keep feeding IN et OUT if (direction.startsWith("in")) { node.inputs.add(dataId); if (submissionId > 0) { // initial data mapSubmissionAndData(submissionId, dataId); } // as above branch.nextAppliedProtocols.add(appliedProtocolId); if (!appliedDataMap.containsKey(dataId)) { appliedDataMap.put(dataId, branch); } else { appliedDataMap.remove(dataId); appliedDataMap.put(dataId, branch); } } else if (direction.startsWith("out")) { node.outputs.add(dataId); branch.previousAppliedProtocols.add(previousAppliedProtocolId); } else { throw new IllegalArgumentException("Data direction not valid for dataId: " + dataId + "|" + direction + "|"); } } count++; } LOG.info("created " + appliedProtocolMap.size() + "(" + count + " applied data points) DAG nodes (= applied protocols) in map"); res.close(); // now traverse the DAG, and associate submission with all the applied protocols traverseDag(); // set the dag level as an attribute to applied protocol setAppliedProtocolSteps(connection); LOG.info("PROCESS TIME DAG: " + (System.currentTimeMillis() - bT)); } /** * * to set the step attribute for the applied protocols * */ private void setAppliedProtocolSteps(Connection connection) throws ObjectStoreException { for (Integer appliedProtocolId : appliedProtocolMap.keySet()) { Integer step = appliedProtocolMap.get(appliedProtocolId).step; if (step != null) { Attribute attr = new Attribute("step", step.toString()); getChadoDBConverter().store(attr, appliedProtocolIdMap.get(appliedProtocolId)); } else { AppliedProtocol ap = appliedProtocolMap.get(appliedProtocolId); LOG.warn("AppliedProtocol.step not set for chado id: " + appliedProtocolId + " sub " + ap.submissionId + " inputs " + ap.inputs + " outputs " + ap.outputs); } } } // Look for protocols that were used to generated GFF files, these are passed to the feature // processor, if features have a score the protocol is set as the scoreProtocol reference. // NOTE this could equally be done with data, data_feature and applied_protocol_data private void findScoreProtocols() { for (Map.Entry<Integer, AppliedData> entry : appliedDataMap.entrySet()) { Integer dataId = entry.getKey(); AppliedData aData = entry.getValue(); if (aData.type.equals("Result File") && (aData.value.endsWith(".gff") || aData.value.endsWith("gff3"))) { for (Integer papId : aData.previousAppliedProtocols) { AppliedProtocol aProtocol = appliedProtocolMap.get(papId); String protocolItemId = protocolItemIds.get(aProtocol.protocolId); scoreProtocols.put(dataSubmissionMap.get(dataId), protocolItemId); } } } } /** * Return the rows needed to construct the DAG of the data/protocols. * The reference to the submission is available only for the first set * of applied protocols, hence the outer join. 
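* Rows are ordered by applied_protocol_id, applied_protocol_data_id and direction (ORDER BY 3,5,6), which processDag relies on to detect when a new applied protocol starts.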
* This is a protected method so that it can be overridden for testing * * @param connection the db connection * @return the SQL result set * @throws SQLException if a database problem occurs */ protected ResultSet getDAG(Connection connection) throws SQLException { String query = "SELECT eap.experiment_id, ap.protocol_id, apd.applied_protocol_id" + " , apd.data_id, apd.applied_protocol_data_id, apd.direction" + " FROM applied_protocol ap LEFT JOIN experiment_applied_protocol eap" + " ON (eap.first_applied_protocol_id = ap.applied_protocol_id )" + " , applied_protocol_data apd" + " WHERE apd.applied_protocol_id = ap.applied_protocol_id" + " ORDER By 3,5,6"; return doQuery(connection, query, "getDAG"); } /** * Applies iteratively buildADaglevel * * @throws SQLException * @throws ObjectStoreException */ private void traverseDag() throws SQLException, ObjectStoreException { List<Integer> currentIterationAP = firstAppliedProtocols; List<Integer> nextIterationAP = new ArrayList<Integer>(); Integer step = 1; // DAG level while (currentIterationAP.size() > 0) { nextIterationAP = buildADagLevel (currentIterationAP, step); currentIterationAP = nextIterationAP; step++; } } /** * This method is given a set of applied protocols (already associated with a submission) * and produces the next set of applied protocols. The latter are the protocols attached to the * output data of the starting set (output data for a applied protocol is the input data for the * next one). * It also fills the map linking directly results ('leaf' output data) with submission * * @param previousAppliedProtocols * @return the next batch of appliedProtocolId * @throws SQLException * @throws ObjectStoreException */ private List<Integer> buildADagLevel(List<Integer> previousAppliedProtocols, Integer step) throws SQLException, ObjectStoreException { List<Integer> nextIterationProtocols = new ArrayList<Integer>(); Iterator<Integer> pap = previousAppliedProtocols.iterator(); while (pap.hasNext()) { List<Integer> outputs = new ArrayList<Integer>(); List<Integer> inputs = new ArrayList<Integer>(); Integer currentId = pap.next(); // add the DAG level here only if these are the first AP if (step == 1) { appliedProtocolMap.get(currentId).step = step; } outputs.addAll(appliedProtocolMap.get(currentId).outputs); Integer submissionId = appliedProtocolMap.get(currentId).submissionId; Iterator<Integer> od = outputs.iterator(); while (od.hasNext()) { Integer currentOD = od.next(); List<Integer> nextProtocols = new ArrayList<Integer>(); // build map submission-data mapSubmissionAndData(submissionId, currentOD); if (appliedDataMap.containsKey(currentOD)) { // fill the list of next (children) protocols nextProtocols.addAll(appliedDataMap.get(currentOD).nextAppliedProtocols); if (appliedDataMap.get(currentOD).nextAppliedProtocols.isEmpty()) { // this is a leaf!! 
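// no further applied protocol consumes this output, so it is a final result of the submission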
} } // to fill submission-dataId map // this is needed, otherwise inputs to AP that are not outputs // of a previous protocol are not considered inputs.addAll(appliedProtocolMap.get(currentId).inputs); Iterator<Integer> in = inputs.iterator(); while (in.hasNext()) { Integer currentIn = in.next(); // build map submission-data mapSubmissionAndData(submissionId, currentIn); } // build the list of children applied protocols chado identifiers // as input for the next iteration Iterator<Integer> nap = nextProtocols.iterator(); while (nap.hasNext()) { Integer currentAPId = nap.next(); // and fill the map with the chado experiment_id // and the DAG level appliedProtocolMap.get(currentAPId).submissionId = submissionId; appliedProtocolMap.get(currentAPId).step = step + 1; nextIterationProtocols.add(currentAPId); // and set the reference from applied protocol to the submission Reference reference = new Reference(); reference.setName("submission"); reference.setRefId(submissionMap.get(submissionId).itemIdentifier); getChadoDBConverter().store(reference, appliedProtocolIdMap.get(currentAPId)); } } } return nextIterationProtocols; } private void processSubmissionOrganism(Connection connection) throws SQLException, ObjectStoreException { long bT = System.currentTimeMillis(); // to monitor time spent in the process ResultSet res = getSubmissionOrganism(connection); int count = 0; while (res.next()) { Integer submissionId = new Integer(res.getInt("experiment_id")); String value = res.getString("value"); submissionOrganismMap.put(submissionId, value); count++; } res.close(); LOG.info("found an organism for " + submissionOrganismMap.size() + " submissions."); LOG.info("PROCESS TIME organisms: " + (System.currentTimeMillis() - bT)); } /** * Return the row needed for the organism. 
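* The organism is read from the 'species' protocol attribute attached to the first applied protocol of each submission.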
* This is a protected method so that it can be overridden for testing * * @param connection the db connection * @return the SQL result set * @throws SQLException if a database problem occurs */ protected ResultSet getSubmissionOrganism(Connection connection) throws SQLException { String query = "select distinct eap.experiment_id, a.value " + " from experiment_applied_protocol eap, applied_protocol ap, " + " protocol_attribute pa, attribute a " + " where eap.first_applied_protocol_id = ap.applied_protocol_id " + " and ap.protocol_id=pa.protocol_id " + " and pa.attribute_id=a.attribute_id " + " and a.heading='species' "; return doQuery(connection, query, "getSubmissionOrganism"); } private void processProjectTable(Connection connection) throws SQLException, ObjectStoreException { long bT = System.currentTimeMillis(); // to monitor time spent in the process ResultSet res = getProjects(connection); int count = 0; while (res.next()) { Integer submissionId = new Integer(res.getInt("experiment_id")); String value = res.getString("value"); submissionProjectMap.put(submissionId, value); count++; } res.close(); Set <Integer> exp = submissionProjectMap.keySet(); Iterator <Integer> i = exp.iterator(); while (i.hasNext()) { Integer thisExp = i.next(); String project = submissionProjectMap.get(thisExp); if (projectIdMap.containsKey(project)) { continue; } LOG.debug("PROJECT: " + project); Item pro = getChadoDBConverter().createItem("Project"); pro.setAttribute("surnamePI", project); Integer intermineObjectId = getChadoDBConverter().store(pro); storeInProjectMaps(pro, project, intermineObjectId); } LOG.info("created " + projectIdMap.size() + " project"); LOG.info("PROCESS TIME projects: " + (System.currentTimeMillis() - bT)); } /** * Return the project name. * This is a protected method so that it can be overridden for testing * * @param connection the db connection * @return the SQL result set * @throws SQLException if a database problem occurs */ protected ResultSet getProjects(Connection connection) throws SQLException { String query = "SELECT distinct a.experiment_id, a.value " + " FROM experiment_prop a " + " where a.name = 'Project' " + " AND rank=0"; return doQuery(connection, query, "getProjects"); } private void processLabTable(Connection connection) throws SQLException, ObjectStoreException { long bT = System.currentTimeMillis(); // to monitor time spent in the process ResultSet res = getLabs(connection); int count = 0; while (res.next()) { Integer submissionId = new Integer(res.getInt("experiment_id")); String value = res.getString("value"); submissionLabMap.put(submissionId, value); count++; } res.close(); Set <Integer> exp = submissionLabMap.keySet(); Iterator <Integer> i = exp.iterator(); while (i.hasNext()) { Integer thisExp = i.next(); String prov = submissionLabMap.get(thisExp); String project = submissionProjectMap.get(thisExp); if (labIdMap.containsKey(prov)) { continue; } LOG.debug("PROV: " + prov); Item lab = getChadoDBConverter().createItem("Lab"); lab.setAttribute("surname", prov); lab.setReference("project", projectIdRefMap.get(project)); Integer intermineObjectId = getChadoDBConverter().store(lab); storeInLabMaps(lab, prov, intermineObjectId); } LOG.info("created " + labIdMap.size() + " labs"); LOG.info("PROCESS TIME labs: " + (System.currentTimeMillis() - bT)); } /** * Return the lab name. 
* This is a protected method so that it can be overridden for testing * * @param connection the db connection * @return the SQL result set * @throws SQLException if a database problem occurs */ protected ResultSet getLabs(Connection connection) throws SQLException { String query = "SELECT distinct a.experiment_id, a.name, a.value " + " FROM experiment_prop a " + " where a.name = 'Lab' " + " AND a.rank=0"; return doQuery(connection, query, "getLabs"); } private void processExperiment(Connection connection) throws SQLException, ObjectStoreException { long bT = System.currentTimeMillis(); // to monitor time spent in the process ResultSet res = getExperimentTitles(connection); Map<String, String> expProMap = new HashMap<String, String>(); while (res.next()) { Integer submissionId = new Integer(res.getInt("experiment_id")); String name = cleanWikiLinks(res.getString("name")); addToMap(expSubMap, name, submissionId); expProMap.put(name, submissionProjectMap.get(submissionId)); } res.close(); Set <String> experiment = expSubMap.keySet(); Iterator <String> i = experiment.iterator(); while (i.hasNext()) { String name = i.next(); Item exp = getChadoDBConverter().createItem("Experiment"); exp.setAttribute("name", name); String project = expProMap.get(name); exp.setReference("project", projectIdRefMap.get(project)); // note: the reference to submission collection is in a separate method Integer intermineObjectId = getChadoDBConverter().store(exp); experimentIdMap .put(name, intermineObjectId); experimentIdRefMap .put(name, exp.getIdentifier()); } LOG.info("created " + expSubMap.size() + " experiments"); LOG.info("PROCESS TIME experiments: " + (System.currentTimeMillis() - bT)); } /** * method to clean a wiki reference (url to a named page) in chado * @param w the wiki reference */ private String cleanWikiLinks(String w) { String url = "http://wiki.modencode.org/project/index.php?title="; // we are stripping from first ':', maybe we want to include project suffix // now: Gene Model Prediction // maybe? Gene Model Prediction:SC:1 String w1 = StringUtils.replace(w, url, ""); String s1 = null; if (w1.contains(":")){ s1 = StringUtils.substringBefore(w1, ":"); } else { // for links missing the : char, e.g. // MacAlpine Early Origin of Replication Identification&oldid=10464 s1 = StringUtils.substringBefore(w1, "&"); } String s = s1.replace('"', ' ').trim(); if (s.contains("%E2%80%99")) { // prime: for the Piano experiment String s2 = s.replace("%E2%80%99", "'"); return s2; } return s; } /** * Return the rows needed for experiment from the experiment_prop table. 
* This is a protected method so that it can be overridden for testing * * @param connection the db connection * @return the SQL result set * @throws SQLException if a database problem occurs */ protected ResultSet getExperimentTitles(Connection connection) throws SQLException { // TODO use standard SQl and deal with string in java String query = "select e.experiment_id, " + " translate(x.accession, '_', ' ') as name " + " from experiment_prop e, dbxref x " + " where e.dbxref_id = x.dbxref_id " + " and e.name='Experiment Description' "; return doQuery(connection, query, "getExperimentTitles"); } private void processSubmission(Connection connection) throws SQLException, ObjectStoreException { long bT = System.currentTimeMillis(); // to monitor time spent in the process ResultSet res = getSubmissions(connection); int count = 0; while (res.next()) { Integer submissionId = new Integer(res.getInt("experiment_id")); String name = res.getString("uniquename"); Item submission = getChadoDBConverter().createItem("Submission"); submission.setAttribute("title", name); String project = submissionProjectMap.get(submissionId); String projectItemIdentifier = projectIdRefMap.get(project); submission.setReference("project", projectItemIdentifier); String labName = submissionLabMap.get(submissionId); String labItemIdentifier = labIdRefMap.get(labName); submission.setReference("lab", labItemIdentifier); String organismName = submissionOrganismMap.get(submissionId); int divPos = organismName.indexOf(' '); String genus = organismName.substring(0, divPos); String species = organismName.substring(divPos + 1); OrganismRepository or = OrganismRepository.getOrganismRepository(); Integer taxId = Integer.valueOf (or.getOrganismDataByGenusSpecies(genus, species).getTaxonId()); LOG.debug("SPECIES: " + organismName + "|" + taxId); String organismItemIdentifier = getChadoDBConverter().getOrganismItem (or.getOrganismDataByGenusSpecies(genus, species).getTaxonId()).getIdentifier(); submission.setReference("organism", organismItemIdentifier); // ..store all Integer intermineObjectId = getChadoDBConverter().store(submission); // ..and fill the SubmissionDetails object SubmissionDetails details = new SubmissionDetails(); details.interMineObjectId = intermineObjectId; details.itemIdentifier = submission.getIdentifier(); details.labItemIdentifier = labItemIdentifier; details.title = name; submissionMap.put(submissionId, details); debugMap .put(details.itemIdentifier, submission.getClassName()); count++; } LOG.info("created " + count + " submissions"); res.close(); LOG.info("PROCESS TIME submissions: " + (System.currentTimeMillis() - bT)); } /** * Return the rows needed for the submission table. 
* This is a protected method so that it can be overridden for testing * * @param connection the db connection * @return the SQL result set * @throws SQLException if a database problem occurs */ protected ResultSet getSubmissions(Connection connection) throws SQLException { String query = "SELECT experiment_id, uniquename" + " FROM experiment"; return doQuery(connection, query, "getSubmissions"); } /** * submission attributes (table experiment_prop) * * @param connection * @throws SQLException * @throws ObjectStoreException */ private void processExperimentProps(Connection connection) throws SQLException, ObjectStoreException { long bT = System.currentTimeMillis(); // to monitor time spent in the process ResultSet res = getExperimentProperties(connection); int count = 0; while (res.next()) { Integer submissionId = new Integer(res.getInt("experiment_id")); String heading = res.getString("name"); String value = res.getString("value"); // TODO this is a temporary hack to make sure we get properly matched Experiment.factors // EF are dealt with separately if (heading.startsWith("Experimental Factor")) { continue; } String fieldName = FIELD_NAME_MAP.get(heading); if (fieldName == null) { LOG.error("NOT FOUND in FIELD_NAME_MAP: " + heading + " [experiment]"); continue; } else if (fieldName == NOT_TO_BE_LOADED) { continue; } if (fieldName.equals("DCCid")) { LOG.info("DCC: " + submissionId + ", " + value); dccIdMap.put(submissionId, value); } if (fieldName.equals("pubMedId")) { // sometime in the form PMID:16938558 if (value.contains(":")) { value = value.substring(value.indexOf(':') + 1); } Item pub = getChadoDBConverter().createItem("Publication"); pub.setAttribute(fieldName, value); Integer intermineObjectId = getChadoDBConverter().store(pub); publicationIdMap.put(submissionId, intermineObjectId); publicationIdRefMap.put(submissionId, pub.getIdentifier()); continue; } setAttribute(submissionMap.get(submissionId).interMineObjectId, fieldName, value); count++; } LOG.info("created " + count + " submission properties"); res.close(); LOG.info("PROCESS TIME submission properties: " + (System.currentTimeMillis() - bT)); } /** * Return the rows needed for submission from the experiment_prop table. * This is a protected method so that it can be overridden for testing * * @param connection the db connection * @return the SQL result set * @throws SQLException if a database problem occurs */ protected ResultSet getExperimentProperties(Connection connection) throws SQLException { String query = "SELECT ep.experiment_id, ep.name, ep.value, ep.rank " + "from experiment_prop ep "; return doQuery(connection, query, "getExperimentProperties"); } private void processEFactor(Connection connection) throws SQLException, ObjectStoreException { long bT = System.currentTimeMillis(); // to monitor time spent in the process ResultSet res = getEFactors(connection); int count = 0; int prevRank = -1; int prevSub = -1; ExperimentalFactor ef = null; String name = null; while (res.next()) { Integer submissionId = new Integer(res.getInt("experiment_id")); Integer rank = new Integer(res.getInt("rank")); String value = res.getString("value"); // the data is alternating between EF types and names, in order. 
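// rows are ordered by submission and rank: the first row of each rank carries the factor name, the following one its type, so pairs are rebuilt by tracking the previous rank and submission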
if (submissionId != prevSub) { // except for the first record, this is a new EF object if (!res.isFirst()) { submissionEFMap.put(prevSub, ef); } ef = new ExperimentalFactor(); } if (rank != prevRank || submissionId != prevSub) { // this is a name if (getPreferredSynonym(value) != null) { value = getPreferredSynonym(value); } ef.efNames.add(value); name = value; count++; } else { // this is a type ef.efTypes.put(name, value); name = null; if (res.isLast()) { submissionEFMap.put(submissionId, ef); LOG.debug("EF MAP last: " + submissionId + "|" + rank + "|" + ef.efNames); } } prevRank = rank; prevSub = submissionId; } res.close(); LOG.info("created " + count + " experimental factors"); LOG.info("PROCESS TIME experimental factors: " + (System.currentTimeMillis() - bT)); } /** * Return the rows needed for the experimental factors. * This is a protected method so that it can be overridden for testing * * @param connection the db connection * @return the SQL result set * @throws SQLException if a database problem occurs */ protected ResultSet getEFactors(Connection connection) throws SQLException { String query = "SELECT ep.experiment_id, ep.name, ep.value, ep.rank " + " FROM experiment_prop ep " + " where ep.name = 'Experimental Factor Name' " + " OR ep.name = 'Experimental Factor Type' " + " ORDER BY 1,4,2"; return doQuery(connection, query, "getEFactors"); } private void processProtocolTable(Connection connection) throws SQLException, ObjectStoreException { long bT = System.currentTimeMillis(); // to monitor time spent in the process ResultSet res = getProtocols(connection); int count = 0; while (res.next()) { Integer protocolChadoId = new Integer(res.getInt("protocol_id")); String name = res.getString("name"); String description = res.getString("description"); String wikiLink = res.getString("accession"); Integer version = res.getInt("version"); // needed: it breaks otherwise if (description.length() == 0) { description = "N/A"; } createProtocol(protocolChadoId, name, description, wikiLink, version); count++; } res.close(); LOG.info("created " + count + " protocols"); LOG.info("PROCESS TIME protocols: " + (System.currentTimeMillis() - bT)); } private String createProtocol(Integer chadoId, String name, String description, String wikiLink, Integer version) throws ObjectStoreException { String protocolItemId = protocolsMap.get(wikiLink); if (protocolItemId == null) { Item protocol = getChadoDBConverter().createItem("Protocol"); protocol.setAttribute("name", name); protocol.setAttribute("description", description); protocol.setAttribute("wikiLink", wikiLink); protocol.setAttribute("version", "" + version); Integer intermineObjectId = getChadoDBConverter().store(protocol); protocolItemId = protocol.getIdentifier(); protocolItemToObjectId.put(protocolItemId, intermineObjectId); protocolsMap.put(wikiLink, protocolItemId); } protocolItemIds.put(chadoId, protocolItemId); return protocolItemId; } private Integer getProtocolInterMineId(Integer chadoId) { return protocolItemToObjectId.get(protocolItemIds.get(chadoId)); } /** * Return the rows needed from the protocol table. 
* This is a protected method so that it can be overridden for testing * * @param connection the db connection * @return the SQL result set * @throws SQLException if a database problem occurs */ protected ResultSet getProtocols(Connection connection) throws SQLException { String query = "SELECT protocol_id, name, protocol.description, accession, protocol.version" + " FROM protocol, dbxref" + " WHERE protocol.dbxref_id = dbxref.dbxref_id"; return doQuery(connection, query, "getProtocols"); } /** * to store protocol attributes * * @param connection * @throws SQLException * @throws ObjectStoreException */ private void processProtocolAttributes(Connection connection) throws SQLException, ObjectStoreException { long bT = System.currentTimeMillis(); // to monitor time spent in the process ResultSet res = getProtocolAttributes(connection); int count = 0; while (res.next()) { Integer protocolId = new Integer(res.getInt("protocol_id")); String heading = res.getString("heading"); String value = res.getString("value"); String fieldName = FIELD_NAME_MAP.get(heading); if (fieldName == null) { LOG.error("NOT FOUND in FIELD_NAME_MAP: " + heading + " [protocol]"); continue; } else if (fieldName == NOT_TO_BE_LOADED) { continue; } setAttribute(getProtocolInterMineId(protocolId), fieldName, value); if (fieldName.equals("type")) { protocolTypesMap.put(protocolId, value); } count++; } LOG.info("created " + count + " protocol attributes"); res.close(); LOG.info("PROCESS TIME protocol attributes: " + (System.currentTimeMillis() - bT)); } /** * Return the rows needed for protocols from the attribute table. * This is a protected method so that it can be overridden for testing * * @param connection the db connection * @return the SQL result set * @throws SQLException if a database problem occurs */ protected ResultSet getProtocolAttributes(Connection connection) throws SQLException { String query = "SELECT p.protocol_id, a.heading, a.value " + "from protocol p, attribute a, protocol_attribute pa " + "where pa.attribute_id = a.attribute_id " + "and pa.protocol_id = p.protocol_id "; return doQuery(connection, query, "getProtocolAttributes"); } private void processAppliedProtocolTable(Connection connection) throws SQLException, ObjectStoreException { long bT = System.currentTimeMillis(); // to monitor time spent in the process ResultSet res = getAppliedProtocols(connection); int count = 0; while (res.next()) { Integer appliedProtocolId = new Integer(res.getInt("applied_protocol_id")); Integer protocolId = new Integer(res.getInt("protocol_id")); Integer submissionId = new Integer(res.getInt("experiment_id")); Item appliedProtocol = getChadoDBConverter().createItem("AppliedProtocol"); // setting references to protocols String protocolItemId = protocolItemIds.get(protocolId); if (protocolItemId != null) { appliedProtocol.setReference("protocol", protocolItemId); } if (submissionId > 0) { // setting reference to submission // probably to rm (we do it later anyway).
TODO: check appliedProtocol.setReference("submission", submissionMap.get(submissionId).itemIdentifier); } // store it and add to maps Integer intermineObjectId = getChadoDBConverter().store(appliedProtocol); appliedProtocolIdMap .put(appliedProtocolId, intermineObjectId); appliedProtocolIdRefMap .put(appliedProtocolId, appliedProtocol.getIdentifier()); count++; } LOG.info("created " + count + " appliedProtocol"); res.close(); LOG.info("PROCESS TIME applied protocols: " + (System.currentTimeMillis() - bT)); } /** * Return the rows needed from the appliedProtocol table. * This is a protected method so that it can be overridden for testing * * @param connection the db connection * @return the SQL result set * @throws SQLException if a database problem occurs */ protected ResultSet getAppliedProtocols(Connection connection) throws SQLException { String query = "SELECT eap.experiment_id ,ap.applied_protocol_id, ap.protocol_id" + " FROM applied_protocol ap" + " LEFT JOIN experiment_applied_protocol eap" + " ON (eap.first_applied_protocol_id = ap.applied_protocol_id )"; /* "SELECT ap.applied_protocol_id, ap.protocol_id, apd.data_id, apd.direction" + " FROM applied_protocol ap, applied_protocol_data apd" + " WHERE apd.applied_protocol_id = ap.applied_protocol_id"; */ return doQuery(connection, query, "getAppliedProtocols"); } private void processAppliedData(Connection connection) throws SQLException, ObjectStoreException { long bT = System.currentTimeMillis(); // to monitor time spent in the process ResultSet res = getAppliedData(connection); int count = 0; while (res.next()) { Integer dataId = new Integer(res.getInt("data_id")); String name = res.getString("name"); String heading = res.getString("heading"); String value = res.getString("value"); String typeId = res.getString("type_id"); // check if this datum has an official name: ResultSet oName = getOfficialName(connection, dataId); String officialName = null; while (oName.next()) { officialName = oName.getString(1); } // if there is one, use it instead of the value String datumType = name = getCvterm(connection, typeId); if (!StringUtils.isEmpty(officialName) && doReplaceWithOfficialName(heading, datumType)) { value = officialName; } Item submissionData = getChadoDBConverter().createItem("SubmissionData"); if (name != null && !name.equals("")) { submissionData.setAttribute("name", name); } // if no name for attribute fetch the cvterm of the type if ((name == null || name.equals("")) && typeId != null) { name = getCvterm(connection, typeId); submissionData.setAttribute("name", name); } if (!StringUtils.isEmpty(value)) { submissionData.setAttribute("value", value); } submissionData.setAttribute("type", heading); // store it and add to object and maps Integer intermineObjectId = getChadoDBConverter().store(submissionData); AppliedData aData = new AppliedData(); aData.intermineObjectId = intermineObjectId; aData.itemIdentifier = submissionData.getIdentifier(); aData.value = value; aData.actualValue = res.getString("value"); aData.dataId = dataId; aData.type = heading; aData.name = name; appliedDataMap.put(dataId, aData); count++; } LOG.info("created " + count + " SubmissionData"); res.close(); LOG.info("PROCESS TIME submission data: " + (System.currentTimeMillis() - bT)); } // For some data types we don't want to replace with official name - e.g. file names and // database record ids. It looks like the official name shouldn't actually be present. 
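    // Illustrative example (values are hypothetical): a datum with heading
    // "Result File" keeps its file name, and a "Result Value" whose type is listed
    // in DB_RECORD_TYPES keeps its database record id, while any other datum is
    // replaced by its wiki 'official name' when one is present, e.g.
    //   doReplaceWithOfficialName("Result File", "GEO_record")     -> false
    //   doReplaceWithOfficialName("Characteristics", "strain")     -> true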
private boolean doReplaceWithOfficialName(String heading, String type) { if (heading.equals("Result File")) { return false; } if (heading.equals("Result Value") && DB_RECORD_TYPES.contains(type)) { return false; } return true; } /** * Return the rows needed for data from the applied_protocol_data table. * * @param connection the db connection * @return the SQL result set * @throws SQLException if a database problem occurs */ protected ResultSet getAppliedData(Connection connection) throws SQLException { String query = "SELECT d.data_id," + " d.heading, d.name, d.value, d.type_id" + " FROM data d"; return doQuery(connection, query, "getAppliedData"); } /** * Return the rows needed for data from the applied_protocol_data table. * * @param connection the db connection * @param dataId the dataId * @return the SQL result set * @throws SQLException if a database problem occurs */ protected ResultSet getOfficialName(Connection connection, Integer dataId) throws SQLException { String query = "SELECT a.value " + " from attribute a, data_attribute da " + " where a.attribute_id=da.attribute_id " + " and da.data_id=" + dataId + " and a.heading='official name'"; return doQuery(connection, query); } /** * Fetch a cvterm by id and cache results in cvtermCache. Returns null if the cv terms isn't * found. * @param connection to chado database * @param cvtermId internal chado id for a cvterm * @return the cvterm name or null if not found * @throws SQLException if database access problem */ private String getCvterm(Connection connection, String cvtermId) throws SQLException { String cvTerm = cvtermCache.get(cvtermId); if (cvTerm == null) { String query = "SELECT c.name " + " from cvterm c" + " where c.cvterm_id=" + cvtermId; Statement stmt = connection.createStatement(); ResultSet res = stmt.executeQuery(query); while (res.next()) { cvTerm = res.getString("name"); } cvtermCache.put(cvtermId, cvTerm); } return cvTerm; } private void processAppliedDataAttributes(Connection connection) throws SQLException, ObjectStoreException { long bT = System.currentTimeMillis(); // to monitor time spent in the process ResultSet res = getAppliedDataAttributes(connection); int count = 0; while (res.next()) { Integer dataId = new Integer(res.getInt("data_id")); String name = res.getString("heading"); String value = res.getString("value"); String type = res.getString("name"); Item dataAttribute = getChadoDBConverter().createItem("SubmissionDataAttribute"); if (name != null && !name.equals("")) { dataAttribute.setAttribute("name", name); } if (!StringUtils.isEmpty(value)) { dataAttribute.setAttribute("value", value); } if (!StringUtils.isEmpty(type)) { dataAttribute.setAttribute("type", type); } // setting references to SubmissionData dataAttribute.setReference("submissionData", appliedDataMap.get(dataId).itemIdentifier); getChadoDBConverter().store(dataAttribute); count++; } LOG.info("created " + count + " data attributes"); res.close(); LOG.info("PROCESS TIME data attributes: " + (System.currentTimeMillis() - bT)); } // first value in the list of synonyms is the 'preferred' value private static String[][] synonyms = new String[][]{ new String[] {"developmental stage", "stage", "developmental_stage", "dev stage", "devstage"}, new String[] {"strain", "strain_or_line"}, new String[] {"cell line", "cell_line", "Cell line", "cell id"}, new String[] {"array", "adf"}, new String[] {"compound", "Compound"}, new String[] {"incubation time", "Incubation Time"}, new String[] {"RNAi reagent", "RNAi_reagent", "dsRNA"}, new String[] 
{"temperature", "temp"} }; private static List<String> makeLookupList(String initialLookup) { for (String[] synonymType : synonyms) { for (String synonym : synonymType) { if (synonym.equals(initialLookup)) { return (List<String>) Arrays.asList(synonymType); } } } return new ArrayList<String>(Collections.singleton(initialLookup)); } private static String getPreferredSynonym(String initialLookup) { return makeLookupList(initialLookup).get(0); } private static Set<String> unifyFactorNames(Collection<String> original) { Set<String> unified = new HashSet<String>(); for (String name : original) { unified.add(getPreferredSynonym(name)); } return unified; } private class SubmissionReference { public SubmissionReference(Integer referencedSubmissionId, String dataValue) { this.referencedSubmissionId = referencedSubmissionId; this.dataValue = dataValue; } private Integer referencedSubmissionId; private String dataValue; } // process new query // get DCC id // add antibody to types private void processSubmissionProperties(Connection connection) throws SQLException, IOException, ObjectStoreException { long bT = System.currentTimeMillis(); // to monitor time spent in the process ResultSet res = getAppliedDataAll(connection); String comma = ","; String reportName = getChadoDBConverter().getDatabase().getName() + "_subs_report.csv"; File f = new File(reportName); FileWriter writer = new FileWriter(f); writer.write("submission" + comma); writer.write("data_heading" + comma); writer.write("data_name" + comma); writer.write("data_value" + comma); writer.write("cv_term" + comma); writer.write("att_heading" + comma); writer.write("att_name" + comma); writer.write("att_value" + comma); writer.write(System.getProperty("line.separator")); SubmissionProperty buildSubProperty = null; Integer lastDataId = new Integer(-1); Map<String, SubmissionProperty> props = new HashMap <String, SubmissionProperty>(); Map<Integer, Map<String, List<SubmissionProperty>>> subToTypes = new HashMap<Integer, Map<String, List<SubmissionProperty>>>(); submissionRefs = new HashMap<Integer, SubmissionReference>(); while (res.next()) { Integer dataId = new Integer(res.getInt("data_id")); String dataHeading = res.getString("data_heading"); String dataName = res.getString("data_name"); String wikiPageUrl = res.getString("data_value"); String cvTerm = res.getString("cv_term"); String attHeading = res.getString("att_heading"); String attName = res.getString("att_name"); String attValue = res.getString("att_value"); String attDbxref = res.getString("att_dbxref"); int attRank = res.getInt("att_rank"); Integer submissionId = dataSubmissionMap.get(dataId); LOG.debug("DCC fetch: " + submissionId + ", " + dccIdMap.get(submissionId)); String dccId = dccIdMap.get(submissionId); writer.write(dccId + comma + dataHeading + comma + dataName + comma + wikiPageUrl + comma + cvTerm + comma + attHeading + comma + attName + comma + attValue + comma + attDbxref + System.getProperty("line.separator")); if (submissionId == null) { LOG.warn("Failed to find a submission id for data id " + dataId + " - this probably" + " means there is a problem with the applied_protocol DAG strucuture."); continue; } // Currently using attValue for referenced submission DCC id, should be dbUrl but seems // to be filled in incorrectly if (attHeading != null && attHeading.startsWith("modENCODE Reference")) { if (attValue.indexOf(":") > 0) { attValue = attValue.substring(0, attValue.indexOf(":")); } Integer referencedSubId = getSubmissionIdFromDccId(attValue); if (referencedSubId != 
null) { SubmissionReference subRef = new SubmissionReference(referencedSubId, wikiPageUrl); submissionRefs.put(submissionId, subRef); LOG.info("Submission " + dccId + " (" + submissionId + ") has reference to " + attValue + " (" + referencedSubId + ")"); } else { LOG.warn("Could not find submission " + attValue + " referenced by " + dccId); } } // we are starting a new data row if (dataId.intValue() != lastDataId.intValue()) { // have we seen this modencodewiki entry before? if (props.containsKey(wikiPageUrl)) { buildSubProperty = null; } else { buildSubProperty = new SubmissionProperty(getPreferredSynonym(dataName), wikiPageUrl); props.put(wikiPageUrl, buildSubProperty); } // submissionId -> [type -> SubmissionProperty] addToSubToTypes(subToTypes, submissionId, props.get(wikiPageUrl)); } if (buildSubProperty != null) { // we are building a new submission attribute, this is the first time we have // seen a data.value that points to modencodewiki buildSubProperty.addDetail(attHeading, attValue, attRank); } lastDataId = dataId; } writer.flush(); writer.close(); // Characteristics are modelled differently to protocol inputs/outputs, read in extra // properties here addSubmissionPropsFromCharacteristics(subToTypes, connection); // some submissions use reagents created in reference submissions, find the properties // of the reagents and add to referencing submission addSubmissionPropsFromReferencedSubmissions(subToTypes, props, submissionRefs); // create and store properties of submission for (Integer submissionId : subToTypes.keySet()) { Integer storedSubmissionId = submissionMap.get(submissionId).interMineObjectId; Map<String, List<SubmissionProperty>> typeToProp = subToTypes.get(submissionId); String dccId = dccIdMap.get(submissionId); ExperimentalFactor ef = submissionEFMap.get(submissionId); if (ef == null) { LOG.warn("No experimental factors found for submission: " + dccId); continue; } Set<String> exFactorNames = unifyFactorNames(ef.efNames); LOG.info("PROPERTIES " + dccId + " typeToProp keys: " + typeToProp.keySet()); List<Item> allPropertyItems = new ArrayList<Item>(); // DEVELOPMENTAL STAGE List<Item> devStageItems = new ArrayList<Item>(); devStageItems.addAll(createFromWikiPage(dccId, "DevelopmentalStage", typeToProp, makeLookupList("developmental stage"))); if (devStageItems.isEmpty()) { devStageItems.addAll(lookForAttributesInOtherWikiPages(dccId, "DevelopmentalStage", typeToProp, new String[] { "developmental stage.developmental stage", "tissue.developmental stage", "tissue source.developmental stage", "cell line.developmental stage", "cell id.developmental stage" })); if (!devStageItems.isEmpty()) { } } storeSubmissionCollection(storedSubmissionId, "developmentalStages", devStageItems); if (!devStageItems.isEmpty() && exFactorNames.contains("developmental stage")) { createExperimentalFactors(submissionId, "developmental stage", devStageItems); exFactorNames.remove("developmental stage"); } allPropertyItems.addAll(devStageItems); // STRAIN List<Item> strainItems = new ArrayList<Item>(); strainItems.addAll(createFromWikiPage( dccId, "Strain", typeToProp, makeLookupList("strain"))); storeSubmissionCollection(storedSubmissionId, "strains", strainItems); if (!strainItems.isEmpty() && exFactorNames.contains("strain")) { createExperimentalFactors(submissionId, "strain", strainItems); exFactorNames.remove("strain"); } allPropertyItems.addAll(strainItems); // ARRAY List<Item> arrayItems = new ArrayList<Item>(); arrayItems.addAll(createFromWikiPage( dccId, "Array", typeToProp, 
makeLookupList("array"))); LOG.debug("ARRAY: " + typeToProp.get("array")); if (arrayItems.isEmpty()) { arrayItems.addAll(lookForAttributesInOtherWikiPages(dccId, "Array", typeToProp, new String[] { "adf.official name" })); if (!arrayItems.isEmpty()) { LOG.debug("Attribute found in other wiki pages: " + dccId + " ARRAY "); } } storeSubmissionCollection(storedSubmissionId, "arrays", arrayItems); if (!arrayItems.isEmpty() && exFactorNames.contains("array")) { createExperimentalFactors(submissionId, "array", arrayItems); exFactorNames.remove("array"); } allPropertyItems.addAll(arrayItems); // CELL LINE List<Item> lineItems = new ArrayList<Item>(); lineItems.addAll(createFromWikiPage(dccId, "CellLine", typeToProp, makeLookupList("cell line"))); storeSubmissionCollection(storedSubmissionId, "cellLines", lineItems); if (!lineItems.isEmpty() && exFactorNames.contains("cell line")) { createExperimentalFactors(submissionId, "cell line", lineItems); exFactorNames.remove("cell line"); } allPropertyItems.addAll(lineItems); // RNAi REAGENT List<Item> reagentItems = new ArrayList<Item>(); reagentItems.addAll(createFromWikiPage(dccId, "SubmissionProperty", typeToProp, makeLookupList("dsRNA"))); if (!reagentItems.isEmpty() && exFactorNames.contains("RNAi reagent")) { createExperimentalFactors(submissionId, "RNAi reagent", reagentItems); exFactorNames.remove("RNAi reagent"); } allPropertyItems.addAll(reagentItems); // ANTIBODY List<Item> antibodyItems = new ArrayList<Item>(); antibodyItems.addAll(createFromWikiPage(dccId, "Antibody", typeToProp, makeLookupList("antibody"))); if (antibodyItems.isEmpty()) { LOG.debug("ANTIBODY: " + typeToProp.get("antibody")); antibodyItems.addAll(lookForAttributesInOtherWikiPages(dccId, "Antibody", typeToProp, new String[] { "antibody.official name" })); if (!antibodyItems.isEmpty()) { LOG.debug("Attribute found in other wiki pages: " + dccId + " ANTIBODY "); } } storeSubmissionCollection(storedSubmissionId, "antibodies", antibodyItems); if (!antibodyItems.isEmpty() && exFactorNames.contains("antibody")) { createExperimentalFactors(submissionId, "antibody", antibodyItems); exFactorNames.remove("antibody"); } allPropertyItems.addAll(antibodyItems); // TISSUE List<Item> tissueItems = new ArrayList<Item>(); tissueItems.addAll(createFromWikiPage( dccId, "Tissue", typeToProp, makeLookupList("tissue"))); if (tissueItems.isEmpty()) { tissueItems.addAll(lookForAttributesInOtherWikiPages(dccId, "Tissue", typeToProp, new String[] { "stage.tissue" , "cell line.tissue" , "cell id.tissue" })); if (!tissueItems.isEmpty()) { LOG.info("Attribute found in other wiki pages: " + dccId + " TISSUE"); } } storeSubmissionCollection(storedSubmissionId, "tissues", tissueItems); if (!tissueItems.isEmpty() && exFactorNames.contains("tissue")) { createExperimentalFactors(submissionId, "tissue", tissueItems); exFactorNames.remove("tissue"); } allPropertyItems.addAll(tissueItems); // There may be some other experimental factors that require SubmissionProperty objects // but don't fall into the categories above. Create them here and set experimental // factors. 
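            // Sketch of the step below, using a hypothetical leftover factor name: if
            // exFactorNames still contains "temperature", lookForAttributesInOtherWikiPages()
            // is queried with that single type, any values found become plain
            // SubmissionProperty items, and createExperimentalFactors() links them to this
            // submission; the factor name is then removed so that only unmatched names
            // remain for the fallback loop that follows.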
ArrayList<String> extraPropNames = new ArrayList<String>(exFactorNames); for (String exFactor : extraPropNames) { List<Item> extraPropItems = new ArrayList<Item>(); extraPropItems.addAll(lookForAttributesInOtherWikiPages(dccId, "SubmissionProperty", typeToProp, new String[] {exFactor})); allPropertyItems.addAll(extraPropItems); createExperimentalFactors(submissionId, exFactor, extraPropItems); exFactorNames.remove(exFactor); } // Store Submission.properties/ SubmissionProperty.submissions storeSubmissionCollection(storedSubmissionId, "properties", allPropertyItems); // deal with remaining factor names (e.g. the ones for which we did // not find a corresponding attribute for (String exFactor : exFactorNames) { String type = ef.efTypes.get(exFactor); createEFItem(submissionId, type, exFactor, null); } } LOG.info("PROCESS TIME submission properties: " + (System.currentTimeMillis() - bT)); } // Traverse DAG following previous applied protocol links to build a list of all AppliedData private void findAppliedProtocolsAndDataFromEarlierInDag(Integer startDataId, List<AppliedData> foundAppliedData, List<AppliedProtocol> foundAppliedProtocols) { AppliedData aData = appliedDataMap.get(startDataId); if (foundAppliedData != null) { foundAppliedData.add(aData); } for (Integer previousAppliedProtocolId : aData.previousAppliedProtocols) { AppliedProtocol ap = appliedProtocolMap.get(previousAppliedProtocolId); if (foundAppliedProtocols != null) { foundAppliedProtocols.add(ap); } for (Integer previousDataId : ap.inputs) { findAppliedProtocolsAndDataFromEarlierInDag(previousDataId, foundAppliedData, foundAppliedProtocols); } } } private void createExperimentalFactors(Integer submissionId, String type, Collection<Item> items) throws ObjectStoreException { for (Item item : items) { createEFItem(submissionId, type, item.getAttribute("name").getValue(), item.getIdentifier()); } } private void createEFItem(Integer current, String type, String efName, String propertyIdentifier) throws ObjectStoreException { // create the EF, if not there already if (!eFactorIdMap.containsKey(efName)) { Item ef = getChadoDBConverter().createItem("ExperimentalFactor"); String preferredType = getPreferredSynonym(type); ef.setAttribute ("type", preferredType); ef.setAttribute ("name", efName); if (propertyIdentifier != null) { ef.setReference("property", propertyIdentifier); } LOG.debug("ExFactor created for sub " + current + ":" + efName + "|" + type); Integer intermineObjectId = getChadoDBConverter().store(ef); eFactorIdMap.put(efName, intermineObjectId); eFactorIdRefMap.put(efName, ef.getIdentifier()); } // if pertinent to the current sub, add to the map for the references addToMap(submissionEFactorMap, current, efName); } private void addToSubToTypes(Map<Integer, Map<String, List<SubmissionProperty>>> subToTypes, Integer submissionId, SubmissionProperty prop) { // submissionId -> [type -> SubmissionProperty] if (submissionId == null) { throw new RuntimeException("Called addToSubToTypes with a null sub id!"); } Map<String, List<SubmissionProperty>> typeToSubProp = subToTypes.get(submissionId); if (typeToSubProp == null) { typeToSubProp = new HashMap<String, List<SubmissionProperty>>(); subToTypes.put(submissionId, typeToSubProp); } List<SubmissionProperty> subProps = typeToSubProp.get(prop.type); if (subProps == null) { subProps = new ArrayList<SubmissionProperty>(); typeToSubProp.put(prop.type, subProps); } subProps.add(prop); } private void addSubmissionPropsFromCharacteristics( Map<Integer, Map<String, 
List<SubmissionProperty>>> subToTypes, Connection connection) throws SQLException { ResultSet res = getAppliedDataCharacteristics(connection); Integer lastAttDbXref = new Integer(-1); Integer lastDataId = new Integer(-1); Map<Integer, SubmissionProperty> createdProps = new HashMap<Integer, SubmissionProperty>(); SubmissionProperty buildSubProperty = null; boolean isValidCharacteristic = false; Integer currentSubId = null; // we need those to attach the property to the correct sub Integer previousSubId = null; while (res.next()) { Integer dataId = new Integer(res.getInt("data_id")); String attHeading = res.getString("att_heading"); String attName = res.getString("att_name"); String attValue = res.getString("att_value"); Integer attDbxref = new Integer(res.getInt("att_dbxref")); int attRank = res.getInt("att_rank"); currentSubId = dataSubmissionMap.get(dataId); if (dataId.intValue() != lastDataId.intValue() || attDbxref.intValue() != lastAttDbXref.intValue() || currentSubId != previousSubId) { // store the last build property if created, type is set only if we found an // attHeading of Characteristics // note: dbxref can remain the same in different subs -> or if (buildSubProperty != null && buildSubProperty.type != null) { LOG.info("XXX11if ADD PREVIOUS" + lastAttDbXref + "|" + buildSubProperty); createdProps.put(lastAttDbXref, buildSubProperty); // createdProps.put(lastDataId, buildSubProperty); addToSubToTypes(subToTypes, previousSubId, buildSubProperty); } // set up for next attDbxref if (createdProps.containsKey(attDbxref) && isValidCharacteristic) { // if (createdProps.containsKey(dataId) && isValidCharacteristic) { // LOG.info("XXX12if ADD CURRENT" + attDbxref+"|"+ createdProps.get(attDbxref)); // seen this property before so just add for this submission, don't build again buildSubProperty = null; isValidCharacteristic = false; addToSubToTypes(subToTypes, currentSubId, createdProps.get(attDbxref)); // addToSubToTypes(subToTypes, currentSubId, createdProps.get(dataId)); } else { // LOG.info("XXX12else NEW SP"); buildSubProperty = new SubmissionProperty(); isValidCharacteristic = false; } } if (attHeading.startsWith("Characteristic")) { // LOG.info("XXX2if Characteristic"); isValidCharacteristic = true; } if (buildSubProperty != null) { if (attHeading.startsWith("Characteristic")) { buildSubProperty.type = getPreferredSynonym(attName); buildSubProperty.wikiPageUrl = attValue; // add detail here as some Characteristics don't reference a wiki page // but have all information on single row buildSubProperty.addDetail(attName, attValue, attRank); } else { // LOG.info("XXX3else ADD for OTHER"+ attValue + "|"+ attHeading); buildSubProperty.addDetail(attHeading, attValue, attRank); } } previousSubId = currentSubId; lastAttDbXref = attDbxref; lastDataId = dataId; } if (buildSubProperty != null && buildSubProperty.type != null) { // LOG.info("XXX3end "+ lastAttDbXref + "|"+ buildSubProperty); // LOG.info("XXX32end " +currentSubId+ "|"+ subToTypes); createdProps.put(lastAttDbXref, buildSubProperty); // createdProps.put(lastDataId, buildSubProperty); addToSubToTypes(subToTypes, currentSubId, buildSubProperty); } } // Some submission mention e.g. an RNA Sample but the details of how that sample was created, // stage, strain, etc are in a previous submission. There are references to previous submission // DCC ids where a sample with the corresponding name can be found. We then need to traverse // backwards along the AppliedProtocol DAG to find the stage, strain, etc wiki pages. 
These // should already have been processed so the properties can just be added to the referencing // submission. private void addSubmissionPropsFromReferencedSubmissions( Map<Integer, Map<String, List<SubmissionProperty>>> subToTypes, Map<String, SubmissionProperty> props, Map<Integer, SubmissionReference> submissionRefs) { for (Map.Entry<Integer, SubmissionReference> entry : submissionRefs.entrySet()) { Integer submissionId = entry.getKey(); SubmissionReference subRef = entry.getValue(); List<AppliedData> refAppliedData = findAppliedDataFromReferencedSubmission(subRef); for (AppliedData aData : refAppliedData) { String possibleWikiUrl = aData.actualValue; if (possibleWikiUrl != null && props.containsKey(possibleWikiUrl)) { SubmissionProperty propFromReferencedSub = props.get(possibleWikiUrl); addToSubToTypes(subToTypes, submissionId, propFromReferencedSub); } } } } private List<AppliedData> findAppliedDataFromReferencedSubmission(SubmissionReference subRef) { List<AppliedData> foundAppliedData = new ArrayList<AppliedData>(); findAppliedProtocolsAndDataFromReferencedSubmission(subRef, foundAppliedData, null); return foundAppliedData; } private List<AppliedProtocol> findAppliedProtocolsFromReferencedSubmission( SubmissionReference subRef) { List<AppliedProtocol> foundAppliedProtocols = new ArrayList<AppliedProtocol>(); findAppliedProtocolsAndDataFromReferencedSubmission(subRef, null, foundAppliedProtocols); return foundAppliedProtocols; } private void findAppliedProtocolsAndDataFromReferencedSubmission( SubmissionReference subRef, List<AppliedData> foundAppliedData, List<AppliedProtocol> foundAppliedProtocols) { String refDataValue = subRef.dataValue; Integer refSubId = subRef.referencedSubmissionId; for (AppliedData aData : appliedDataMap.values()) { String currentDataValue = aData.value; Integer currentDataSubId = dataSubmissionMap.get(aData.dataId); if (refDataValue.equals(currentDataValue) && refSubId.equals(currentDataSubId)) { LOG.info("REFSUBS found a matching data value: " + currentDataValue + " in sub " + dccIdMap.get(currentDataSubId) + " ref sub = " + dccIdMap.get(refSubId)); Integer foundDataId = aData.dataId; findAppliedProtocolsAndDataFromEarlierInDag(foundDataId, foundAppliedData, foundAppliedProtocols); } } } private List<Item> createFromWikiPage(String dccId, String clsName, Map<String, List<SubmissionProperty>> typeToProp, List<String> types) throws ObjectStoreException { List<Item> items = new ArrayList<Item>(); List<SubmissionProperty> props = new ArrayList<SubmissionProperty>(); for (String type : types) { if (typeToProp.containsKey(type)) { props.addAll(typeToProp.get(type)); } } items.addAll(createItemsForSubmissionProperties(dccId, clsName, props)); return items; } private void storeSubmissionCollection(Integer storedSubmissionId, String name, List<Item> items) throws ObjectStoreException { if (!items.isEmpty()) { ReferenceList refList = new ReferenceList(name, getIdentifiersFromItems(items)); getChadoDBConverter().store(refList, storedSubmissionId); } } private List<String> getIdentifiersFromItems(Collection<Item> items) { List<String> ids = new ArrayList<String>(); for (Item item : items) { ids.add(item.getIdentifier()); } return ids; } private List<Item> createItemsForSubmissionProperties(String dccId, String clsName, List<SubmissionProperty> subProps) throws ObjectStoreException { List<Item> items = new ArrayList<Item>(); for (SubmissionProperty subProp : subProps) { Item item = getItemForSubmissionProperty(clsName, subProp, dccId); if (item != null) { 
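                // getItemForSubmissionProperty() returns null when a property has no usable
                // 'official name' (or fallback 'name') detail, so those are skipped here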
items.add(item); } } return items; } private Item getItemForSubmissionProperty(String clsName, SubmissionProperty prop, String dccId) throws ObjectStoreException { Item propItem = subItemsMap.get(prop.wikiPageUrl); if (propItem == null) { if (clsName != null) { List<String> checkOfficialName = prop.details.get("official name"); if (checkOfficialName == null) { LOG.warn("No 'official name', using 'name' instead for: " + prop.wikiPageUrl); checkOfficialName = prop.details.get("name"); } if (checkOfficialName == null) { LOG.info("Official name - missing for property: " + prop.type + ", " + prop.wikiPageUrl); return null; } else if (checkOfficialName.size() != 1) { LOG.info("Official name - multiple times for property: " + prop.type + ", " + prop.wikiPageUrl + ", " + checkOfficialName); } String officialName = getCorrectedOfficialName(prop); propItem = createSubmissionProperty(clsName, officialName); propItem.setAttribute("type", getPreferredSynonym(prop.type)); propItem.setAttribute("wikiLink", WIKI_URL + prop.wikiPageUrl); if (clsName.equals("DevelopmentalStage")) { setAttributeOnProp(prop, propItem, "sex", "sex"); List<String> devStageValues = prop.details.get("developmental stage"); if (devStageValues != null) { for (String devStageValue : devStageValues) { propItem.addToCollection("ontologyTerms", getDevStageTerm(devStageValue, dccId)); } } else { LOG.error("METADATA FAIL: no 'developmental stage' values for wiki page: " + prop.wikiPageUrl); } } else if (clsName.equals("Antibody")) { setAttributeOnProp(prop, propItem, "antigen", "antigen"); setAttributeOnProp(prop, propItem, "host", "hostOrganism"); setAttributeOnProp(prop, propItem, "target name", "targetName"); setGeneItem(dccId, prop, propItem, "Antibody"); } else if (clsName.equals("Array")) { setAttributeOnProp(prop, propItem, "platform", "platform"); setAttributeOnProp(prop, propItem, "resolution", "resolution"); setAttributeOnProp(prop, propItem, "genome", "genome"); } else if (clsName.equals("CellLine")) { setAttributeOnProp(prop, propItem, "sex", "sex"); setAttributeOnProp(prop, propItem, "short description", "description"); setAttributeOnProp(prop, propItem, "species", "species"); setAttributeOnProp(prop, propItem, "tissue", "tissue"); setAttributeOnProp(prop, propItem, "cell type", "cellType"); setAttributeOnProp(prop, propItem, "target name", "targetName"); setGeneItem(dccId, prop, propItem, "CellLine"); } else if (clsName.equals("Strain")) { setAttributeOnProp(prop, propItem, "species", "species"); setAttributeOnProp(prop, propItem, "source", "source"); // the following 2 should be mutually exclusive setAttributeOnProp(prop, propItem, "Description", "description"); setAttributeOnProp(prop, propItem, "details", "description"); setAttributeOnProp(prop, propItem, "aliases", "name"); setAttributeOnProp(prop, propItem, "reference", "reference"); setAttributeOnProp(prop, propItem, "target name", "targetName"); setGeneItem(dccId, prop, propItem, "Strain"); } else if (clsName.equals("Tissue")) { setAttributeOnProp(prop, propItem, "species", "species"); setAttributeOnProp(prop, propItem, "sex", "sex"); setAttributeOnProp(prop, propItem, "organismPart", "organismPart"); } getChadoDBConverter().store(propItem); } subItemsMap.put(prop.wikiPageUrl, propItem); } return propItem; } private void setGeneItem(String dccId, SubmissionProperty prop, Item propItem, String source) throws ObjectStoreException { String targetText = null; String[] possibleTypes = new String[] {"target id"}; for (String targetType : possibleTypes) { if 
(prop.details.containsKey(targetType)) { if (prop.details.get(targetType).size() != 1) { // we used to complain if multiple values, now only // if they don't have the same value checkIfSameValue(prop, source, targetType); } targetText = prop.details.get(targetType).get(0); break; } } if (targetText != null) { // if no target name was found use the target id if (!propItem.hasAttribute("targetName")) { propItem.setAttribute("targetName", targetText); } String geneItemId = getTargetGeneItemIdentfier(targetText, dccId); if (geneItemId != null) { propItem.setReference("target", geneItemId); } } } /** * @param prop * @param source * @param targetType * @throws RuntimeException */ private void checkIfSameValue(SubmissionProperty prop, String source, String targetType) throws RuntimeException { String value = prop.details.get(targetType).get(0); for (int i = 1; i < prop.details.get(targetType).size(); i++) { String newValue = prop.details.get(targetType).get(i); if (!newValue.equals(value)) { throw new RuntimeException(source + " should only have one value for '" + targetType + "' field: " + prop.details.get(targetType)); } } } private void setAttributeOnProp(SubmissionProperty subProp, Item item, String metadataName, String attributeName) { if (subProp.details.containsKey(metadataName)) { if (metadataName.equalsIgnoreCase("aliases")) { for (String s :subProp.details.get(metadataName)) { if (s.equalsIgnoreCase("yellow cinnabar brown speck")) { // swapping name with fullName String full = item.getAttribute("name").getValue(); item.setAttribute("fullName", full); item.setAttribute(attributeName, s); break; } } } else if (metadataName.equalsIgnoreCase("description") || metadataName.equalsIgnoreCase("details")) { // description is often split in more than 1 line, details should be correct order StringBuffer sb = new StringBuffer(); for (String desc : subProp.details.get(metadataName)) { sb.append(desc); } if (sb.length() > 0) { item.setAttribute(attributeName, sb.toString()); } } else { String value = subProp.details.get(metadataName).get(0); item.setAttribute(attributeName, value); } } } private String getTargetGeneItemIdentfier(String geneTargetIdText, String dccId) throws ObjectStoreException { String taxonId = ""; String originalId = null; String flyPrefix = "fly_genes:"; String wormPrefix = "worm_genes:"; if (geneTargetIdText.startsWith(flyPrefix)) { originalId = geneTargetIdText.substring(flyPrefix.length()); taxonId = "7227"; } else if (geneTargetIdText.startsWith(wormPrefix)) { originalId = geneTargetIdText.substring(wormPrefix.length()); taxonId = "6239"; } else { // attempt to work out the organism from the submission taxonId = getTaxonIdForSubmission(dccId); originalId = geneTargetIdText; LOG.info("RESOLVER: found taxon " + taxonId + " for submission " + dccId); } IdResolver resolver = null; if (taxonId.equals("7227")) { resolver = flyResolverFactory.getIdResolver(); } else if (taxonId.equals("6239")) { resolver = wormResolverFactory.getIdResolver(); } else { LOG.info("RESOLVER: unable to work out organism for target id text: " + geneTargetIdText); } String geneItemId = null; String primaryIdentifier = resolveGene(originalId, taxonId, resolver); if (primaryIdentifier != null) { geneItemId = geneToItemIdentifier.get(primaryIdentifier); if (geneItemId == null) { Item gene = getChadoDBConverter().createItem("Gene"); geneItemId = gene.getIdentifier(); gene.setAttribute("primaryIdentifier", primaryIdentifier); getChadoDBConverter().store(gene); geneToItemIdentifier.put(primaryIdentifier, 
geneItemId); } else { LOG.info("RESOLVER fetched gene from cache: " + primaryIdentifier); } } return geneItemId; } private String resolveGene(String originalId, String taxonId, IdResolver resolver) { String primaryIdentifier = null; int resCount = resolver.countResolutions(taxonId, originalId); if (resCount != 1) { LOG.info("RESOLVER: failed to resolve gene to one identifier, ignoring " + "gene: " + originalId + " for organism " + taxonId + " count: " + resCount + " found ids: " + resolver.resolveId(taxonId, originalId) + "."); } else { primaryIdentifier = resolver.resolveId(taxonId, originalId).iterator().next(); LOG.info("RESOLVER found gene " + primaryIdentifier + " for original id: " + originalId); } return primaryIdentifier; } private List<Item> lookForAttributesInOtherWikiPages(String dccId, String clsName, Map<String, List<SubmissionProperty>> typeToProp, String[] lookFor) throws ObjectStoreException { List<Item> items = new ArrayList<Item>(); for (String typeProp : lookFor) { if (typeProp.indexOf(".") > 0) { String[] bits = StringUtils.split(typeProp, '.'); String type = bits[0]; String propName = bits[1]; if (typeToProp.containsKey(type)) { for (SubmissionProperty subProp : typeToProp.get(type)) { if (subProp.details.containsKey(propName)) { for (String value : subProp.details.get(propName)) { items.add(createNonWikiSubmissionPropertyItem(dccId, clsName, getPreferredSynonym(propName), correctAttrValue(value))); } } } if (!items.isEmpty()) { break; } } } else { // no attribute type given so use the data.value (SubmissionProperty.wikiPageUrl) // which probably won't be a wiki page if (typeToProp.containsKey(typeProp)) { for (SubmissionProperty subProp : typeToProp.get(typeProp)) { String value = subProp.wikiPageUrl; // This is an ugly special case to deal with 'exposure time/24 hours' if (subProp.details.containsKey("Unit")) { String unit = subProp.details.get("Unit").get(0); value = value + " " + unit + (unit.endsWith("s") ? 
"" : "s"); } items.add(createNonWikiSubmissionPropertyItem(dccId, clsName, subProp.type, correctAttrValue(value))); } } } } return items; } private String correctAttrValue(String value) { if (value == null) { return null; } value = value.replace("–", "-"); return value; } private Item createNonWikiSubmissionPropertyItem(String dccId, String clsName, String type, String name) throws ObjectStoreException { if (clsName.equals("DevelopmentalStage")) { name = correctDevStageTerm(name); } Item item = nonWikiSubmissionProperties.get(name); if (item == null) { item = createSubmissionProperty(clsName, name); item.setAttribute("type", getPreferredSynonym(type)); if (clsName.equals("DevelopmentalStage")) { String ontologyTermId = getDevStageTerm(name, dccId); item.addToCollection("ontologyTerms", ontologyTermId); } getChadoDBConverter().store(item); nonWikiSubmissionProperties.put(name, item); } return item; } private Item createSubmissionProperty(String clsName, String name) { Item subProp = getChadoDBConverter().createItem(clsName); if (name != null) { subProp.setAttribute("name", name); } return subProp; } private String getCorrectedOfficialName(SubmissionProperty prop) { String preferredType = getPreferredSynonym(prop.type); String name = null; if (prop.details.containsKey("official name")) { name = prop.details.get("official name").get(0); } else if (prop.details.containsKey("name")) { name = prop.details.get("name").get(0); } else { // no official name so maybe there is a key that matches the type - sometimes the // setup for Characteristics for (String lookup : makeLookupList(prop.type)) { if (prop.details.containsKey(lookup)) { name = prop.details.get(lookup).get(0); } } } return correctOfficialName(name, preferredType); } /** * Unify variations on similar official names. 
* @param name the original 'official name' value * @param type the treatment depends on the type * @return a unified official name */ protected String correctOfficialName(String name, String type) { if (name == null) { return null; } if (type.equals("developmental stage")) { name = name.replace("_", " "); name = name.replaceFirst("embryo", "Embryo"); name = name.replaceFirst("Embyro", "Embryo"); if (name.matches("E\\d.*")) { name = name.replaceFirst("^E", "Embryo "); } if (name.matches("Embryo.*\\d")) { name = name + " h"; } if (name.matches(".*hr")) { name = name.replace("hr", "h"); } if (name.matches("Embryo.*\\dh")) { name = name.replaceFirst("h", " h"); } if (name.startsWith("DevStage:")) { name = name.replaceFirst("DevStage:", "").trim(); } if (name.matches("L\\d")) { name = name + " stage larvae"; } if (name.matches(".*L\\d")) { name = name + " stage larvae"; } if (name.matches("WPP.*")) { name = name.replaceFirst("WPP", "White prepupae (WPP)"); } } return name; } private String getDevStageTerm(String value, String dccId) throws ObjectStoreException { value = correctDevStageTerm(value); // there may be duplicate terms for fly and worm, include taxon in key String taxonId = getTaxonIdForSubmission(dccId); OrganismRepository or = OrganismRepository.getOrganismRepository(); String genus = or.getOrganismDataByTaxon(Integer.parseInt(taxonId)).getGenus(); String key = value + "_" + genus; String identifier = devStageTerms.get(key); if (identifier == null) { Item term = getChadoDBConverter().createItem("OntologyTerm"); term.setAttribute("name", value); String ontologyRef = getDevelopmentOntologyByTaxon(taxonId); if (ontologyRef != null) { term.setReference("ontology", ontologyRef); } getChadoDBConverter().store(term); devStageTerms.put(key, term.getIdentifier()); identifier = term.getIdentifier(); } return identifier; } private String correctDevStageTerm(String value) { // some terms are prefixed with ontology namespace String prefix = "FlyBase development CV:"; if (value.startsWith(prefix)) { value = value.substring(prefix.length()); } return value; } private String getTaxonIdForSubmission(String dccId) { Integer subChadoId = getSubmissionIdFromDccId(dccId); String organism = submissionOrganismMap.get(subChadoId); OrganismRepository or = OrganismRepository.getOrganismRepository(); return "" + or.getOrganismDataByFullName(organism).getTaxonId(); } private String getDevelopmentOntologyByTaxon(String taxonId) throws ObjectStoreException { if (taxonId == null) { return null; } String ontologyName = null; OrganismRepository or = OrganismRepository.getOrganismRepository(); String genus = or.getOrganismDataByTaxon(Integer.parseInt(taxonId)).getGenus(); if (genus.equals("Drosophila")) { ontologyName = "Fly Development"; } else { ontologyName = "Worm Development"; } String ontologyId = devOntologies.get(ontologyName); if (ontologyId == null) { Item ontology = getChadoDBConverter().createItem("Ontology"); ontology.setAttribute("title", ontologyName); getChadoDBConverter().store(ontology); ontologyId = ontology.getIdentifier(); devOntologies.put(ontologyName, ontologyId); } return ontologyId; } private Integer getSubmissionIdFromDccId(String dccId) { for (Map.Entry<Integer, String> entry : dccIdMap.entrySet()) { if (entry.getValue().equals(dccId)) { return entry.getKey(); } } return null; } /** * Return the rows needed for data from the applied_protocol_data table. 
* * @param connection the db connection * @return the SQL result set * @throws SQLException if a database problem occurs */ protected ResultSet getAppliedDataAll(Connection connection) throws SQLException { String sraAcc = "SRA acc"; String query = "SELECT d.data_id, d.heading as data_heading," + " d.name as data_name, d.value as data_value," + " c.name as cv_term," + " a.attribute_id, a.heading as att_heading, a.name as att_name, a.value as att_value," + " a.dbxref_id as att_dbxref, a.rank as att_rank" + " FROM data d" + " LEFT JOIN data_attribute da ON (d.data_id = da.data_id)" + " LEFT JOIN attribute a ON (da.attribute_id = a.attribute_id)" + " LEFT JOIN cvterm c ON (d.type_id = c.cvterm_id)" + " LEFT JOIN dbxref as x ON (a.dbxref_id = x.dbxref_id)" + " WHERE d.name != '" + sraAcc + "'" + " AND d.value != '' " + " ORDER BY d.data_id"; return doQuery(connection, query, "getAppliedDataAll"); } /** * Return the rows needed for data from the applied_protocol_data table. * * @param connection the db connection * @return the SQL result set * @throws SQLException if a database problem occurs */ protected ResultSet getAppliedDataCharacteristics(Connection connection) throws SQLException { String query = "select d.data_id, d.heading as data_heading," + " d.name as data_name, d.value as data_value," + " a.attribute_id, a.heading as att_heading, a.name as att_name, a.value as att_value," + " a.dbxref_id as att_dbxref, a.rank as att_rank" + " FROM data d, data_attribute da, attribute a, dbxref ax, db" + " WHERE d.data_id = da.data_id" + " AND da.attribute_id = a.attribute_id" + " AND a.dbxref_id = ax.dbxref_id" + " AND ax.db_id = db.db_id" + " ORDER BY d.data_id, a.dbxref_id "; return doQuery(connection, query, "getAppliedDataCharacteristics"); } private class SubmissionProperty { protected String type; protected String wikiPageUrl; protected Map<String, List<String>> details; protected SubmissionProperty() { details = new HashMap<String, List<String>>(); } public SubmissionProperty(String type, String wikiPageUrl) { this.type = type; this.wikiPageUrl = wikiPageUrl; details = new HashMap<String, List<String>>(); } public void addDetail(String type, String value, int rank) { List<String> values = details.get(type); if (values == null) { values = new ArrayList<String>(); details.put(type, values); } while (values.size() <= rank) { values.add(null); } values.set(rank, value); } public String toString() { return this.type + ": " + this.wikiPageUrl + this.details.entrySet(); } } private class DatabaseRecordConfig { private String dbName; private String dbDescrition; private String dbURL; private Set<String> types = new HashSet<String>();; } private Set<DatabaseRecordConfig> initDatabaseRecordConfigs() { Set<DatabaseRecordConfig> configs = new HashSet<DatabaseRecordConfig>(); DatabaseRecordConfig geo = new DatabaseRecordConfig(); geo.dbName = "GEO"; geo.dbDescrition = "Gene Expression Omnibus (NCBI)"; geo.dbURL = "http: geo.types.add("GEO_record"); configs.add(geo); DatabaseRecordConfig ae = new DatabaseRecordConfig(); ae.dbName = "ArrayExpress"; ae.dbDescrition = "ArrayExpress (EMBL-EBI)"; ae.dbURL = "http: ae.types.add("ArrayExpress_record"); configs.add(ae); DatabaseRecordConfig sra = new DatabaseRecordConfig(); sra.dbName = "SRA"; sra.dbDescrition = "Sequence Read Archive (NCBI)"; sra.dbURL = "http: sra.types.add("ShortReadArchive_project_ID_list (SRA)"); sra.types.add("ShortReadArchive_project_ID (SRA)"); configs.add(sra); DatabaseRecordConfig ta = new DatabaseRecordConfig(); ta.dbName = "Trace 
Archive"; ta.dbDescrition = "Trace Archive (NCBI)"; ta.dbURL = "http: ta.types.add("TraceArchive_record"); configs.add(ta); DatabaseRecordConfig de = new DatabaseRecordConfig(); de.dbName = "dbEST"; de.dbDescrition = "Expressed Sequence Tags database (NCBI)"; de.dbURL = "http: de.types.add("dbEST_record"); configs.add(de); return configs; } /** * Query to get data attributes * This is a protected method so that it can be overridden for testing. * * * @param connection the db connection * @return the SQL result set * @throws SQLException if a database problem occurs */ protected ResultSet getAppliedDataAttributes(Connection connection) throws SQLException { String query = "select da.data_id, a.heading, a.value, a.name " + " from data_attribute da, attribute a" + " where da.attribute_id = a.attribute_id"; return doQuery(connection, query, "getAppliedDataAttributes"); } private void setSubmissionRefs(Connection connection) throws ObjectStoreException { long bT = System.currentTimeMillis(); // to monitor time spent in the process for (Integer submissionId : submissionDataMap.keySet()) { for (Integer dataId : submissionDataMap.get(submissionId)) { if (appliedDataMap.get(dataId).intermineObjectId == null) { continue; } Reference reference = new Reference(); reference.setName("submission"); reference.setRefId(submissionMap.get(submissionId).itemIdentifier); getChadoDBConverter().store(reference, appliedDataMap.get(dataId).intermineObjectId); } } LOG.info("TIME setting submission-data references: " + (System.currentTimeMillis() - bT)); } private void createDatabaseRecords(Connection connection) throws ObjectStoreException { long bT = System.currentTimeMillis(); // to monitor time spent in the process Set<DatabaseRecordConfig> configs = initDatabaseRecordConfigs(); for (Integer submissionId : submissionDataMap.keySet()) { List<String> submissionDbRecords = new ArrayList<String>(); for (Integer dataId : submissionDataMap.get(submissionId)) { AppliedData ad = appliedDataMap.get(dataId); if (ad.type.equalsIgnoreCase("Result Value")) { for (DatabaseRecordConfig conf : configs) { for (String type : conf.types) { if (ad.name.equals(type)) { submissionDbRecords.addAll(createDatabaseRecords(ad.value, conf)); } } } } if (!submissionDbRecords.isEmpty()) { ReferenceList col = new ReferenceList("databaseRecords", submissionDbRecords); getChadoDBConverter().store(col, submissionMap.get(submissionId).interMineObjectId); } } } LOG.info("TIME creating DatabaseRecord objects: " + (System.currentTimeMillis() - bT)); } private void createResultFiles(Connection connection) throws ObjectStoreException { long bT = System.currentTimeMillis(); // to monitor time spent in the process for (Integer submissionId : submissionDataMap.keySet()) { for (Integer dataId : submissionDataMap.get(submissionId)) { AppliedData ad = appliedDataMap.get(dataId); if (ad.type.equalsIgnoreCase("Result File")) { if (!StringUtils.isBlank(ad.value)) { createResultFile(ad.value, ad.name, submissionId); } } } } LOG.info("TIME creating ResultFile objects: " + (System.currentTimeMillis() - bT)); } private List<String> createDatabaseRecords(String accession, DatabaseRecordConfig config) throws ObjectStoreException { List<String> dbRecordIds = new ArrayList<String>(); Set<String> cleanAccessions = new HashSet<String>(); // NOTE - this is a special case to deal with a very strange SRA accession format in some // Celniker submissions. The 'accession' is provided as e.g. // SRR013492.225322.1;SRR013492.462158.1;... 
// We just want the unique SRR ids if (config.dbName.equals("SRA") && (accession.indexOf(';') != -1 || accession.indexOf('.') != -1)) { for (String part : accession.split(";")) { if (part.indexOf('.') != -1) { cleanAccessions.add(part.substring(0, part.indexOf('.'))); } else { cleanAccessions.add(part); } } } else { cleanAccessions.add(accession); } for (String cleanAccession : cleanAccessions) { dbRecordIds.add(createDatabaseRecord(cleanAccession, config)); } return dbRecordIds; } private String createDatabaseRecord(String accession, DatabaseRecordConfig config) throws ObjectStoreException { DatabaseRecordKey key = new DatabaseRecordKey(config.dbName, accession); String dbRecordId = dbRecords.get(key); if (dbRecordId == null) { Item dbRecord = getChadoDBConverter().createItem("DatabaseRecord"); dbRecord.setAttribute("database", config.dbName); dbRecord.setAttribute("description", config.dbDescrition); if (StringUtils.isEmpty(accession)) { dbRecord.setAttribute("accession", "To be confirmed"); } else { dbRecord.setAttribute("url", config.dbURL + accession); dbRecord.setAttribute("accession", accession); } getChadoDBConverter().store(dbRecord); dbRecordId = dbRecord.getIdentifier(); dbRecords.put(key, dbRecordId); } return dbRecordId; } private class DatabaseRecordKey { private String db; private String accession; /** * Construct with the database and accession * @param db database name * @param accession id in database */ public DatabaseRecordKey(String db, String accession) { this.db = db; this.accession = accession; } /** * {@inheritDoc} */ public boolean equals(Object o) { if (o instanceof DatabaseRecordKey) { DatabaseRecordKey otherKey = (DatabaseRecordKey) o; if (StringUtils.isNotEmpty(accession) && StringUtils.isNotEmpty(otherKey.accession)) { return this.db.equals(otherKey.db) && this.accession.equals(otherKey.accession); } } return false; } /** * {@inheritDoc} */ public int hashCode() { return db.hashCode() + 3 * accession.hashCode(); } } private void createResultFile(String fileName, String type, Integer submissionId) throws ObjectStoreException { Item resultFile = getChadoDBConverter().createItem("ResultFile"); resultFile.setAttribute("name", fileName); String url = null; if (fileName.startsWith("http")) { url = fileName; } else { String dccId = dccIdMap.get(submissionId); url = FILE_URL + dccId + "/extracted/" + fileName; } resultFile.setAttribute("url", url); resultFile.setAttribute("type", type); resultFile.setReference("submission", submissionMap.get(submissionId).itemIdentifier); getChadoDBConverter().store(resultFile); } private void createRelatedSubmissions(Connection connection) throws ObjectStoreException { Map<Integer, Set<String>> relatedSubs = new HashMap<Integer, Set<String>>(); for (Map.Entry<Integer, SubmissionReference> entry : submissionRefs.entrySet()) { Integer submissionId = entry.getKey(); SubmissionReference ref = entry.getValue(); addRelatedSubmissions(relatedSubs, submissionId, ref.referencedSubmissionId); addRelatedSubmissions(relatedSubs, ref.referencedSubmissionId, submissionId); } for (Map.Entry<Integer, Set<String>> entry : relatedSubs.entrySet()) { ReferenceList related = new ReferenceList("relatedSubmissions", new ArrayList<String>(entry.getValue())); getChadoDBConverter().store(related, entry.getKey()); } } private void addRelatedSubmissions(Map<Integer, Set<String>> relatedSubs, Integer subId, Integer relatedId) { Set<String> itemIds = relatedSubs.get(subId); if (itemIds == null) { itemIds = new HashSet<String>(); 
relatedSubs.put(submissionMap.get(subId).interMineObjectId, itemIds); } itemIds.add(submissionMap.get(relatedId).itemIdentifier); } //sub -> prot private void setSubmissionProtocolsRefs(Connection connection) throws ObjectStoreException { long bT = System.currentTimeMillis(); // to monitor time spent in the process Map<Integer, List<Integer>> submissionProtocolMap = new HashMap<Integer, List<Integer>>(); Iterator<Integer> apId = appliedProtocolMap.keySet().iterator(); while (apId.hasNext()) { Integer thisAP = apId.next(); AppliedProtocol ap = appliedProtocolMap.get(thisAP); addToMap(submissionProtocolMap, ap.submissionId, ap.protocolId); } Iterator<Integer> subs = submissionProtocolMap.keySet().iterator(); while (subs.hasNext()) { Integer thisSubmissionId = subs.next(); List<Integer> protocolChadoIds = submissionProtocolMap.get(thisSubmissionId); ReferenceList collection = new ReferenceList(); collection.setName("protocols"); for (Integer protocolChadoId : protocolChadoIds) { collection.addRefId(protocolItemIds.get(protocolChadoId)); } Integer storedSubmissionId = submissionMap.get(thisSubmissionId).interMineObjectId; getChadoDBConverter().store(collection, storedSubmissionId); // may need protocols from referenced submissions to work out experiment type protocolChadoIds.addAll(findProtocolIdsFromReferencedSubmissions(thisSubmissionId)); String piName = submissionProjectMap.get(thisSubmissionId); setSubmissionExperimentType(storedSubmissionId, protocolChadoIds, piName); } LOG.info("TIME setting submission-protocol references: " + (System.currentTimeMillis() - bT)); } // store Submission.experimentType if it can be inferred from protocols private void setSubmissionExperimentType(Integer storedSubId, List<Integer> protocolIds, String piName) throws ObjectStoreException { Set<String> protocolTypes = new HashSet<String>(); for (Integer protocolId : protocolIds) { protocolTypes.add(protocolTypesMap.get(protocolId).trim()); } String experimentType = inferExperimentType(protocolTypes, piName); if (experimentType != null) { Attribute expTypeAtt = new Attribute("experimentType", experimentType); getChadoDBConverter().store(expTypeAtt, storedSubId); } } // Fetch protocols used to create reagents that are inputs to this submission, these are // found in referenced submissions private List<Integer> findProtocolIdsFromReferencedSubmissions(Integer submissionId) { List<Integer> protocolIds = new ArrayList<Integer>(); if (submissionRefs == null) { throw new RuntimeException("Attempting to access submissionRefs before it has been" + " populated, this method needs to be called after" + " processSubmissionProperties"); } SubmissionReference subRef = submissionRefs.get(submissionId); if (subRef != null) { for (AppliedProtocol aProtocol : findAppliedProtocolsFromReferencedSubmission(subRef)) { protocolIds.add(aProtocol.protocolId); } } return protocolIds; } /** * Work out an experiment type give the combination of protocols used for the * submussion. e.g. 
*immunoprecipitation + hybridization = chIP-chip * @param protocolTypes the protocal types * @param piName name of PI * @return a short experiment type */ protected String inferExperimentType(Set<String> protocolTypes, String piName) { // extraction + sequencing + reverse transcription - ChIP = RTPCR // extraction + sequencing - reverse transcription - ChIP = RNA-seq if (containsMatch(protocolTypes, "nucleic_acid_extraction|RNA extraction") && containsMatch(protocolTypes, "sequencing(_protocol)?") && !containsMatch(protocolTypes, "chromatin_immunoprecipitation")) { if (containsMatch(protocolTypes, "reverse_transcription")) { return "RTPCR"; } else { return "RNA-seq"; } } // reverse transcription + PCR + RACE = RACE // reverse transcription + PCR - RACE = RTPCR if (containsMatch(protocolTypes, "reverse_transcription") && containsMatch(protocolTypes, "PCR(_amplification)?")) { if (containsMatch(protocolTypes, "RACE")) { return "RACE"; } else { return "RTPCR"; } } // ChIP + hybridization = ChIP-chip // ChIP - hybridization = ChIP-seq if (containsMatch(protocolTypes, "(.*)?immunoprecipitation")) { if (containsMatch(protocolTypes, "hybridization")) { return "ChIP-chip"; } else { return "ChIP-seq"; } } // hybridization - ChIP = // Celniker: RNA tiling array // Henikoff: Chromatin-chip // otherwise: Tiling array if (containsMatch(protocolTypes, "hybridization") && !containsMatch(protocolTypes, "immunoprecipitation")) { if (piName.equals("Celniker")) { return "RNA tiling array"; } else if (piName.equals("Henikoff")) { return "Chromatin-chip"; } else { return "Tiling array"; } } // annotation = Computational annotation if (containsMatch(protocolTypes, "annotation")) { return "Computational annotation"; } // If we haven't found a type yet, and there is a growth protocol, then // this is probably an RNA sample creation experiment from Celniker if (containsMatch(protocolTypes, "grow")) { return "RNA sample creation"; } return null; } // utility method for looking up in a set by regular expression private boolean containsMatch(Set<String> testSet, String regex) { boolean matches = false; Pattern p = Pattern.compile(regex); for (String test : testSet) { Matcher m = p.matcher(test); if (m.matches()) { matches = true; } } return matches; } //sub -> exp private void setSubmissionExperimetRefs(Connection connection) throws ObjectStoreException { long bT = System.currentTimeMillis(); // to monitor time spent in the process Iterator<String> exp = expSubMap.keySet().iterator(); while (exp.hasNext()) { String thisExp = exp.next(); List<Integer> subs = expSubMap.get(thisExp); Iterator <Integer> s = subs.iterator(); while (s.hasNext()) { Integer thisSubId = s.next(); Reference reference = new Reference(); reference.setName("experiment"); reference.setRefId(experimentIdRefMap.get(thisExp)); getChadoDBConverter().store(reference, submissionMap.get(thisSubId).interMineObjectId); } } LOG.info("TIME setting submission-experiment references: " + (System.currentTimeMillis() - bT)); } //sub -> ef private void setSubmissionEFactorsRefs(Connection connection) throws ObjectStoreException { long bT = System.currentTimeMillis(); // to monitor time spent in the process Iterator<Integer> subs = submissionEFactorMap.keySet().iterator(); while (subs.hasNext()) { Integer thisSubmissionId = subs.next(); List<String> eFactors = submissionEFactorMap.get(thisSubmissionId); LOG.debug("EF REFS: " + thisSubmissionId + " (" + eFactors + ")"); Iterator<String> ef = eFactors.iterator(); ReferenceList collection = new ReferenceList(); 
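            // collect the item identifiers of the ExperimentalFactor objects created in
            // createEFItem() for this submission into an "experimentalFactors" reference list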
collection.setName("experimentalFactors");
            while (ef.hasNext()) {
                String currentEF = ef.next();
                collection.addRefId(eFactorIdRefMap.get(currentEF));
                LOG.debug("EF REFS: ->" + currentEF + " ref: " + eFactorIdRefMap.get(currentEF));
            }
            if (!collection.getRefIds().isEmpty()) {
                LOG.debug("EF REFS: ->" + thisSubmissionId + "|"
                        + submissionMap.get(thisSubmissionId).interMineObjectId);
                getChadoDBConverter().store(collection,
                        submissionMap.get(thisSubmissionId).interMineObjectId);
            }
        }
        LOG.info("TIME setting submission-exFactors references: "
                + (System.currentTimeMillis() - bT));
    }

    //sub -> publication
    private void setSubmissionPublicationRefs(Connection connection) throws ObjectStoreException {
        long bT = System.currentTimeMillis(); // to monitor time spent in the process
        Iterator<Integer> subs = publicationIdMap.keySet().iterator();
        while (subs.hasNext()) {
            Integer thisSubmissionId = subs.next();
            // Integer im_oid = publicationIdMap.get(thisSubmissionId);
            Reference reference = new Reference();
            reference.setName("publication");
            reference.setRefId(publicationIdRefMap.get(thisSubmissionId));
            getChadoDBConverter().store(reference,
                    submissionMap.get(thisSubmissionId).interMineObjectId);
        }
        LOG.info("TIME setting submission-publication references: "
                + (System.currentTimeMillis() - bT));
    }

    /**
     * to store references between applied protocols and their input data
     * reverse reference: data -> next appliedProtocols
     * and between applied protocols and their output data
     * reverse reference: data -> previous appliedProtocols
     * (many to many)
     */
    private void setDAGRefs(Connection connection) throws ObjectStoreException {
        long bT = System.currentTimeMillis(); // to monitor time spent in the process
        for (Integer thisAP : appliedProtocolMap.keySet()) {
            AppliedProtocol ap = appliedProtocolMap.get(thisAP);
            if (!ap.inputs.isEmpty()) {
                ReferenceList collection = new ReferenceList("inputs");
                for (Integer inputId : ap.inputs) {
                    collection.addRefId(appliedDataMap.get(inputId).itemIdentifier);
                }
                getChadoDBConverter().store(collection, appliedProtocolIdMap.get(thisAP));
            }
            if (!ap.outputs.isEmpty()) {
                ReferenceList collection = new ReferenceList("outputs");
                for (Integer outputId : ap.outputs) {
                    collection.addRefId(appliedDataMap.get(outputId).itemIdentifier);
                }
                getChadoDBConverter().store(collection, appliedProtocolIdMap.get(thisAP));
            }
        }
        LOG.info("TIME setting DAG references: " + (System.currentTimeMillis() - bT));
    }

    /**
     * maps from chado field names to ours.
     *
     * TODO: check if up to date
     *
     * if a field is not needed it is marked with NOT_TO_BE_LOADED
     * a check is performed and fields unaccounted for are logged.
*/ private static final Map<String, String> FIELD_NAME_MAP = new HashMap<String, String>(); private static final String NOT_TO_BE_LOADED = "this is ; illegal - anyway"; static { // experiment // swapping back to uniquename in experiment table // FIELD_NAME_MAP.put("Investigation Title", "title"); FIELD_NAME_MAP.put("Investigation Title", NOT_TO_BE_LOADED); FIELD_NAME_MAP.put("Project", NOT_TO_BE_LOADED); FIELD_NAME_MAP.put("Project URL", NOT_TO_BE_LOADED); FIELD_NAME_MAP.put("Lab", NOT_TO_BE_LOADED); FIELD_NAME_MAP.put("Experiment Description", "description"); FIELD_NAME_MAP.put("Experimental Design", "design"); FIELD_NAME_MAP.put("Experimental Factor Type", "factorType"); FIELD_NAME_MAP.put("Experimental Factor Name", "factorName"); FIELD_NAME_MAP.put("Quality Control Type", "qualityControl"); FIELD_NAME_MAP.put("Replicate Type", "replicate"); FIELD_NAME_MAP.put("Date of Experiment", "experimentDate"); FIELD_NAME_MAP.put("Public Release Date", "publicReleaseDate"); FIELD_NAME_MAP.put("Embargo Date", "embargoDate"); FIELD_NAME_MAP.put("dcc_id", "DCCid"); FIELD_NAME_MAP.put("PubMed ID", "pubMedId"); FIELD_NAME_MAP.put("Person First Name", NOT_TO_BE_LOADED); FIELD_NAME_MAP.put("Person Mid Initials", NOT_TO_BE_LOADED); FIELD_NAME_MAP.put("Person Last Name", NOT_TO_BE_LOADED); FIELD_NAME_MAP.put("Person Affiliation", NOT_TO_BE_LOADED); FIELD_NAME_MAP.put("Person Address", NOT_TO_BE_LOADED); FIELD_NAME_MAP.put("Person Phone", NOT_TO_BE_LOADED); FIELD_NAME_MAP.put("Person Email", NOT_TO_BE_LOADED); FIELD_NAME_MAP.put("Person Roles", NOT_TO_BE_LOADED); // data: parameter values FIELD_NAME_MAP.put("Array Data File", "arrayDataFile"); FIELD_NAME_MAP.put("Array Design REF", "arrayDesignRef"); FIELD_NAME_MAP.put("Derived Array Data File", "derivedArrayDataFile"); FIELD_NAME_MAP.put("Result File", "resultFile"); // protocol FIELD_NAME_MAP.put("Protocol Type", "type"); FIELD_NAME_MAP.put("url protocol", "url"); FIELD_NAME_MAP.put("species", NOT_TO_BE_LOADED); FIELD_NAME_MAP.put("references", NOT_TO_BE_LOADED); FIELD_NAME_MAP.put("lab", NOT_TO_BE_LOADED); FIELD_NAME_MAP.put("Comment", NOT_TO_BE_LOADED); } /** * to store identifiers in project maps. * @param i * @param chadoId * @param intermineObjectId * @throws ObjectStoreException */ private void storeInProjectMaps(Item i, String surnamePI, Integer intermineObjectId) throws ObjectStoreException { if (i.getClassName().equals("Project")) { projectIdMap .put(surnamePI, intermineObjectId); projectIdRefMap .put(surnamePI, i.getIdentifier()); } else { throw new IllegalArgumentException( "Type mismatch: expecting Project, getting " + i.getClassName().substring(37) + " with intermineObjectId = " + intermineObjectId + ", project = " + surnamePI); } debugMap .put(i.getIdentifier(), i.getClassName()); } /** * to store identifiers in lab maps. 
* @param i * @param chadoId * @param intermineObjectId * @throws ObjectStoreException */ private void storeInLabMaps(Item i, String labName, Integer intermineObjectId) throws ObjectStoreException { if (i.getClassName().equals("Lab")) { labIdMap .put(labName, intermineObjectId); labIdRefMap .put(labName, i.getIdentifier()); } else { throw new IllegalArgumentException( "Type mismatch: expecting Lab, getting " + i.getClassName().substring(37) + " with intermineObjectId = " + intermineObjectId + ", lab = " + labName); } debugMap .put(i.getIdentifier(), i.getClassName()); } private void mapSubmissionAndData(Integer submissionId, Integer dataId) { addToMap(submissionDataMap, submissionId, dataId); dataSubmissionMap.put(dataId, submissionId); } /** * method to wrap the execution of a query, without log info * @param connection * @param query * @return the result set * @throws SQLException */ private ResultSet doQuery(Connection connection, String query) throws SQLException { Statement stmt = connection.createStatement(); ResultSet res = stmt.executeQuery(query); return res; } /** * method to wrap the execution of a query with log info) * @param connection * @param query * @param comment for not logging * @return the result set * @throws SQLException */ private ResultSet doQuery(Connection connection, String query, String comment) throws SQLException { // we could avoid passing comment if we trace the calling method // new Throwable().fillInStackTrace().getStackTrace()[1].getMethodName() LOG.info("executing: " + query); long bT = System.currentTimeMillis(); Statement stmt = connection.createStatement(); ResultSet res = stmt.executeQuery(query); LOG.info("QUERY TIME " + comment + ": " + (System.currentTimeMillis() - bT)); return res; } /** * adds an element to a list which is the value of a map * @param m the map (<Integer, List<Integer>>) * @param key the key for the map * @param value the list */ private void addToMap(Map<Integer, List<Integer>> m, Integer key, Integer value) { List<Integer> values = new ArrayList<Integer>(); if (m.containsKey(key)) { values = m.get(key); } if (!values.contains(value)) { values.add(value); m.put(key, values); } } /** * adds an element to a list which is the value of a map * @param m the map (<Integer, List<String>>) * @param key the key for the map * @param value the list */ private void addToMap(Map<Integer, List<String>> m, Integer key, String value) { List<String> ids = new ArrayList<String>(); if (m.containsKey(key)) { ids = m.get(key); } if (!ids.contains(value)) { ids.add(value); m.put(key, ids); } } /** * adds an element to a list which is the value of a map * @param m the map (<String, List<Integer>>) * @param key the key for the map * @param value the list */ private void addToMap(Map<String, List<Integer>> m, String key, Integer value) { List<Integer> ids = new ArrayList<Integer>(); if (m.containsKey(key)) { ids = m.get(key); } if (!ids.contains(value)) { ids.add(value); m.put(key, ids); } } }
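/*
 * A minimal, standalone sketch of the protocol-type matching performed by
 * inferExperimentType() above: each protocol type is tested with a regular
 * expression and the first matching rule decides the experiment type.  Only
 * two of the rules are reproduced here, in the same relative order as in the
 * converter; the class and method names below are illustrative only.
 */
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import java.util.regex.Pattern;

public class ExperimentTypeSketch {

    // same idea as the converter's containsMatch(): true if any element matches the regex
    static boolean containsMatch(Set<String> testSet, String regex) {
        Pattern p = Pattern.compile(regex);
        for (String test : testSet) {
            if (p.matcher(test).matches()) {
                return true;
            }
        }
        return false;
    }

    // two of the rules from inferExperimentType(), kept in their original order
    static String infer(Set<String> protocolTypes) {
        if (containsMatch(protocolTypes, "nucleic_acid_extraction|RNA extraction")
                && containsMatch(protocolTypes, "sequencing(_protocol)?")
                && !containsMatch(protocolTypes, "chromatin_immunoprecipitation")) {
            return containsMatch(protocolTypes, "reverse_transcription") ? "RTPCR" : "RNA-seq";
        }
        if (containsMatch(protocolTypes, "(.*)?immunoprecipitation")) {
            return containsMatch(protocolTypes, "hybridization") ? "ChIP-chip" : "ChIP-seq";
        }
        return null;
    }

    public static void main(String[] args) {
        // chromatin immunoprecipitation + hybridization -> ChIP-chip
        System.out.println(infer(new HashSet<>(
                Arrays.asList("chromatin_immunoprecipitation", "hybridization"))));
        // extraction + sequencing, no reverse transcription -> RNA-seq
        System.out.println(infer(new HashSet<>(
                Arrays.asList("nucleic_acid_extraction", "sequencing_protocol"))));
    }
}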
package org.intermine.bio.dataconversion;

import org.intermine.bio.dataconversion.ChadoSequenceProcessor.FeatureData;
import org.intermine.dataconversion.ItemsTestCase;

public class ChadoSequenceProcessorTest extends ItemsTestCase
{
    public ChadoSequenceProcessorTest(String arg) {
        super(arg);
    }

    public void testFeatureDataFlags() {
        FeatureData fdat = new FeatureData();
        assertEquals(false, fdat.getFlag(FeatureData.DATASET_SET));
        assertEquals(false, fdat.getFlag(FeatureData.EVIDENCE_CREATED));

        fdat.setFlag(FeatureData.DATASET_SET, true);
        assertEquals(true, fdat.getFlag(FeatureData.DATASET_SET));
        assertEquals(false, fdat.getFlag(FeatureData.EVIDENCE_CREATED));

        fdat.setFlag(FeatureData.DATASET_SET, false);
        assertEquals(false, fdat.getFlag(FeatureData.DATASET_SET));
        assertEquals(false, fdat.getFlag(FeatureData.EVIDENCE_CREATED));
    }
}
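/*
 * The test above only relies on FeatureData flags behaving as independent
 * booleans keyed by named constants.  Below is a minimal sketch of one common
 * way such flags are stored (a single short used as a bit set).  The real
 * FeatureData implementation lives in ChadoSequenceProcessor and is not shown
 * here, so treat this purely as an illustration of the behaviour the test
 * asserts, not as the actual implementation.
 */
public class BitFlagSketch {
    public static final short DATASET_SET = 1 << 0;
    public static final short EVIDENCE_CREATED = 1 << 1;

    private short flags = 0;

    public boolean getFlag(short flag) {
        return (flags & flag) != 0;
    }

    public void setFlag(short flag, boolean value) {
        if (value) {
            flags |= flag;
        } else {
            flags &= ~flag;
        }
    }

    public static void main(String[] args) {
        BitFlagSketch fdat = new BitFlagSketch();
        fdat.setFlag(DATASET_SET, true);
        // prints true then false, mirroring the assertions in the test above
        System.out.println(fdat.getFlag(DATASET_SET));
        System.out.println(fdat.getFlag(EVIDENCE_CREATED));
    }
}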
package org.project.neutrino.nfvo.catalogue.mano.common; import org.project.neutrino.nfvo.catalogue.util.IdGenerator; import javax.persistence.*; @Entity @Inheritance(strategy = InheritanceType.TABLE_PER_CLASS) public class DeploymentFlavour { /*ID of the deployment flavour.*/ @Id protected String id = IdGenerator.createUUID(); @Version protected int version = 0; /* * Assurance parameter against which this flavour is being described. The key could be a combination of multiple assurance * parameters with a logical relationship between them. The parameters should be present as a monitoring_parameter supported in clause 6.2.1.1. * For example, a flavour of a virtual EPC could be described in terms of the assurance parameter "calls per second" (cps). * */ protected String flavour_key; protected String extId; private int ram; private int disk; private int vcpus; public String getFlavour_key() { return flavour_key; } public void setFlavour_key(String flavour_key) { this.flavour_key = flavour_key; } public String getId() { return id; } public void setId(String id) { this.id = id; } public String getExtId() { return extId; } public void setExtId(String extId) { this.extId = extId; } public int getRam() { return ram; } public void setRam(int ram) { this.ram = ram; } public int getDisk() { return disk; } public void setDisk(int disk) { this.disk = disk; } public int getVcpus() { return vcpus; } public void setVcpus(int vcpus) { this.vcpus = vcpus; } @Override public String toString() { return "DeploymentFlavour{" + "id='" + id + '\'' + ", version=" + version + ", flavour_key='" + flavour_key + '\'' + ", extId='" + extId + '\'' + '}'; } }
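/*
 * A small usage sketch for the entity above: the id is assigned eagerly from
 * IdGenerator.createUUID() when the object is constructed (rather than by the
 * JPA provider), and version starts at 0 and is maintained via @Version.  The
 * field values below are made up, IdGenerator is assumed to return a plain
 * UUID string, and the units for ram/disk are not specified by the entity.
 */
import org.project.neutrino.nfvo.catalogue.mano.common.DeploymentFlavour;

public class DeploymentFlavourSketch {
    public static void main(String[] args) {
        DeploymentFlavour flavour = new DeploymentFlavour();
        flavour.setFlavour_key("m1.small");
        flavour.setRam(2048);  // units assumed, not defined by the entity
        flavour.setDisk(20);
        flavour.setVcpus(1);

        // id is already populated by the field initialiser
        System.out.println(flavour.getId());
        System.out.println(flavour); // note: toString() omits ram, disk and vcpus
    }
}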
package com.sensei.search.client.json; import java.util.Arrays; import org.json.JSONObject; import org.junit.Assert; import org.junit.Before; import org.junit.Ignore; import org.junit.Test; import com.sensei.search.client.json.req.Operator; import com.sensei.search.client.json.req.Selection; import com.sensei.search.client.json.req.SenseiClientRequest; import com.sensei.search.client.json.req.filter.Filter; import com.sensei.search.client.json.req.filter.Filters; import com.sensei.search.client.json.req.query.Queries; import com.sensei.search.client.json.req.query.Query; import com.sensei.search.client.json.res.SenseiResult; @Ignore public class JavaClientIntegrationTest extends Assert { private SenseiServiceProxy senseiServiceProxy; @Before public void setUp () { senseiServiceProxy = new SenseiServiceProxy("http://localhost:8080/sensei"); } @Test public void testSelectionRange() throws Exception { //2000 1548; //2001 1443; //2002 1464; // [2000 TO 2002] ==> 4455 // (2000 TO 2002) ==> 1443 // (2000 TO 2002] ==> 2907 // [2000 TO 2002) ==> 2991 SenseiClientRequest request = SenseiClientRequest.builder().addSelection(Selection.range("year", "2000", "2002")).build(); SenseiResult res = senseiServiceProxy.sendRequest(request); assertEquals("numhits is wrong", 4455, res.getNumhits().intValue()); request = SenseiClientRequest.builder().addSelection(Selection.range("year", "2000", "2002", false, false)).build(); res = senseiServiceProxy.sendRequest( request); assertEquals("numhits is wrong", 1443, res.getNumhits().intValue()); request = SenseiClientRequest.builder().addSelection(Selection.range("year", "2000", "2002", false, true)).build(); res = senseiServiceProxy.sendRequest( request); assertEquals("numhits is wrong", 2907, res.getNumhits().intValue()); request = SenseiClientRequest.builder().addSelection(Selection.range("year", "2000", "2002", true, false)).build(); res = senseiServiceProxy.sendRequest( request); assertEquals("numhits is wrong", 2991, res.getNumhits().intValue()); } @Test public void testMatchAllWithBoostQuery() throws Exception { SenseiClientRequest request = SenseiClientRequest.builder().query(Queries.matchAllQuery(1.2)).build(); SenseiResult res = senseiServiceProxy.sendRequest( request); assertEquals("numhits is wrong", 15000, res.getNumhits().intValue()); } @Test public void testQueryStringQuery() throws Exception { SenseiClientRequest request = SenseiClientRequest.builder().query(Queries.stringQuery("red AND cool")).build(); SenseiResult res = senseiServiceProxy.sendRequest( request); assertEquals("numhits is wrong", 1070, res.getNumhits().intValue()); } // @Test public void testUIDQueryRaw() throws Exception { String req = "{\"query\": {\"ids\": {\"values\": [\"1\", \"2\", \"3\"], \"excludes\": [\"2\"]}}}"; System.out.println(req); JSONObject res =new JSONObject(senseiServiceProxy.sendPost(req)); assertEquals("numhits is wrong", 2, res.getInt("numhits")); assertEquals("the first uid is wrong", 1, res.getJSONArray("hits").getJSONObject(0).getInt("uid")); assertEquals("the second uid is wrong", 3, res.getJSONArray("hits").getJSONObject(1).getInt("uid")); } @Test public void testUIDQuery() throws Exception { SenseiClientRequest request = SenseiClientRequest.builder().query(Queries.ids(Arrays.asList("1","2", "3"), Arrays.asList("2"), 1.0)).build(); SenseiResult res = senseiServiceProxy.sendRequest( request); assertEquals("numhits is wrong", 2, res.getNumhits().intValue()); assertEquals("the first uid is wrong", 1, res.getHits().get(0).getUid().intValue()); assertEquals("the 
second uid is wrong", 3, res.getHits().get(1).getUid().intValue()); } @Test public void testTextQuery() throws Exception { SenseiClientRequest request = SenseiClientRequest.builder().query(Queries.textQuery("contents", "red cool", Operator.and, 1.0)).build(); SenseiResult res = senseiServiceProxy.sendRequest( request); assertEquals("numhits is wrong", 1070, res.getNumhits().intValue()); } @Test public void testTermQuery() throws Exception { SenseiClientRequest request = SenseiClientRequest.builder().query(Queries.term("color", "red", 1.0)).build(); SenseiResult res = senseiServiceProxy.sendRequest( request); assertEquals("numhits is wrong", 2160, res.getNumhits().intValue()); } @Test public void testTermsQuery() throws Exception { SenseiClientRequest request = SenseiClientRequest.builder().query(Queries.terms("tags", Arrays.asList("leather", "moon-roof"), Arrays.asList("hybrid"), Operator.or, 0, 1.0)).build(); SenseiResult res = senseiServiceProxy.sendRequest( request); assertEquals("numhits is wrong", 5777, res.getNumhits().intValue()); } @Test public void testBooleanQuery() throws Exception { SenseiClientRequest request = SenseiClientRequest.builder().query( Queries.bool(Arrays.asList((Query)Queries.term("color", "red", 1.0)), Arrays.asList((Query)Queries.term("category", "compact", 1.0)), null, 1.0)).build(); SenseiResult res = senseiServiceProxy.sendRequest( request); assertEquals("numhits is wrong", 1652, res.getNumhits().intValue()); } @Test public void testDisMaxQuery() throws Exception { SenseiClientRequest request = SenseiClientRequest.builder().query( Queries.disMax(0.7, 1.2, Queries.term("color", "red", 1.0), Queries.term("color", "blue", 1.0)) ).build(); SenseiResult res = senseiServiceProxy.sendRequest( request); assertEquals("numhits is wrong", 3264, res.getNumhits().intValue()); } @Test public void testPathQuery() throws Exception { SenseiClientRequest request = SenseiClientRequest.builder().query( Queries.path("makemodel","asian/acura/3.2tl" , 1.0) ).build(); SenseiResult res = senseiServiceProxy.sendRequest( request); assertEquals("numhits is wrong", 126, res.getNumhits().intValue()); } @Test public void testPrefixQuery() throws Exception { SenseiClientRequest request = SenseiClientRequest.builder().query( Queries.prefix("color","blu" , 2.0) ).build(); SenseiResult res = senseiServiceProxy.sendRequest( request); assertEquals("numhits is wrong", 1104, res.getNumhits().intValue()); } @Test public void testWildcardQuery() throws Exception { SenseiClientRequest request = SenseiClientRequest.builder().query( Queries.wildcard("color","bl*e" , 2.0) ).build(); SenseiResult res = senseiServiceProxy.sendRequest( request); assertEquals("numhits is wrong", 1104, res.getNumhits().intValue()); } @Test public void testRangeQuery() throws Exception { SenseiClientRequest request = SenseiClientRequest.builder().query( Queries.range("year", "1999", "2000", true, true, 2.0, false) ).build(); SenseiResult res = senseiServiceProxy.sendRequest( request); assertEquals("numhits is wrong", 3015, res.getNumhits().intValue()); } @Test public void testRangeQuery2() throws Exception { SenseiClientRequest request = SenseiClientRequest.builder().query( Queries.range("year", "1999", "2000", true, true, 2.0, false, "int") ).build(); System.out.println(JsonSerializer.serialize(request)); SenseiResult res = senseiServiceProxy.sendRequest( request); assertEquals("numhits is wrong", 3015, res.getNumhits().intValue()); } @Test public void testFilteredQuery() throws Exception { SenseiClientRequest request = 
SenseiClientRequest.builder().query( Queries.filteredQuery(Queries.term("color", "red", 1.0), Filters.range("year", "1999", "2000"), 1.0) ).build(); System.out.println(JsonSerializer.serialize(request)); SenseiResult res = senseiServiceProxy.sendRequest( request); assertEquals("numhits is wrong", 447, res.getNumhits().intValue()); } @Test public void testSpanTermQuery() throws Exception { SenseiClientRequest request = SenseiClientRequest.builder().query( Queries.spanTerm("color", "red", 1.0) ).build(); System.out.println(JsonSerializer.serialize(request)); SenseiResult res = senseiServiceProxy.sendRequest( request); assertEquals("numhits is wrong", 2160, res.getNumhits().intValue()); } @Test public void testSpanOrQuery() throws Exception { SenseiClientRequest request = SenseiClientRequest.builder().query( Queries.spanOr(1.0, Queries.spanTerm("color", "red", 1.0), Queries.spanTerm("color", "blue")) ).build(); System.out.println(JsonSerializer.serialize(request)); SenseiResult res = senseiServiceProxy.sendRequest( request); assertEquals("numhits is wrong", 3264, res.getNumhits().intValue()); } public void testSpanOrQueryRaw() throws Exception { String req = "{\"query\":{\"span_or\":{\"clauses\":[{\"span_term\":{\"color\":\"red\"}},{\"span_term\":{\"color\":\"blue\"}}]}}}"; System.out.println(req); JSONObject res =new JSONObject(senseiServiceProxy.sendPost(req)); assertEquals("numhits is wrong", 3264, res.getInt("numhits")); } @Test public void testSpanNotQuery() throws Exception { SenseiClientRequest request = SenseiClientRequest.builder().query( Queries.spanNot( Queries.spanTerm("contents", "compact", 1.0), Queries.spanTerm("contents", "red", 1.0), 1.0) ).build(); System.out.println(JsonSerializer.serialize(request)); SenseiResult res = senseiServiceProxy.sendRequest( request); assertEquals("numhits is wrong", 4596, res.getNumhits().intValue()); } @Test public void testSpanNearQuery1() throws Exception { SenseiClientRequest request = SenseiClientRequest.builder().query( Queries.spanNear(Arrays.asList(Queries.spanTerm("contents", "red"), Queries.spanTerm("contents", "compact"), Queries.spanTerm("contents", "hybrid")), 12, false, false, 1.0) ).build(); System.out.println(JsonSerializer.serialize(request)); SenseiResult res = senseiServiceProxy.sendRequest( request); assertEquals("numhits is wrong", 274, res.getNumhits().intValue()); } @Test public void testSpanNearQuery2() throws Exception { SenseiClientRequest request = SenseiClientRequest.builder().query( Queries.spanNear(Arrays.asList(Queries.spanTerm("contents", "red"), Queries.spanTerm("contents", "compact"), Queries.spanTerm("contents", "favorite")), 0, true, false, 1.0) ).build(); System.out.println(JsonSerializer.serialize(request)); SenseiResult res = senseiServiceProxy.sendRequest( request); assertEquals("numhits is wrong", 63, res.getNumhits().intValue()); } @Test public void testSpanFirstQuery() throws Exception { SenseiClientRequest request = SenseiClientRequest.builder().query( Queries.spanFirst(Queries.spanTerm("color", "red"), 2, 1.0) ).build(); System.out.println(JsonSerializer.serialize(request)); SenseiResult res = senseiServiceProxy.sendRequest( request); assertEquals("numhits is wrong", 2160, res.getNumhits().intValue()); } @Test public void testUIDFilter() throws Exception { SenseiClientRequest request = SenseiClientRequest.builder().filter(Filters.ids(Arrays.asList("1","2", "3"), Arrays.asList("2"))).build(); SenseiResult res = senseiServiceProxy.sendRequest( request); assertEquals("numhits is wrong", 2, 
res.getNumhits().intValue()); assertEquals("the first uid is wrong", 1, res.getHits().get(0).getUid().intValue()); assertEquals("the second uid is wrong", 3, res.getHits().get(1).getUid().intValue()); } @Test public void testAndFilter() throws Exception { SenseiClientRequest request = SenseiClientRequest.builder().filter( Filters.and(Filters.term("tags", "mp3") , Filters.term("color", "red")) ).build(); System.out.println(JsonSerializer.serialize(request)); SenseiResult res = senseiServiceProxy.sendRequest( request); assertEquals("numhits is wrong", 439, res.getNumhits().intValue()); } @Test public void testOrFilter() throws Exception { SenseiClientRequest request = SenseiClientRequest.builder().filter( Filters.or(Filters.term("color", "blue") , Filters.term("color", "red")) ).build(); System.out.println(JsonSerializer.serialize(request)); SenseiResult res = senseiServiceProxy.sendRequest( request); assertEquals("numhits is wrong", 3264, res.getNumhits().intValue()); } @Test public void testBooleanFilter() throws Exception { SenseiClientRequest request = SenseiClientRequest.builder().filter( Filters.bool(Arrays.asList((Filter)Filters.term("color", "red")), Arrays.asList((Filter)Filters.term("category", "compact")), Arrays.asList((Filter)Filters.term("color", "red"))) ).build(); SenseiResult res = senseiServiceProxy.sendRequest( request); assertEquals("numhits is wrong", 1652, res.getNumhits().intValue()); } @Test public void testQueryFilter() throws Exception { SenseiClientRequest request = SenseiClientRequest.builder().filter( Filters.query(Queries.range("year", "1999", "2000",true, true, 1.0, false)) ).build(); System.out.println(JsonSerializer.serialize(request)); SenseiResult res = senseiServiceProxy.sendRequest( request); assertEquals("numhits is wrong", 3015, res.getNumhits().intValue()); } @Test public void testTermFilter() throws Exception { SenseiClientRequest request = SenseiClientRequest.builder().filter( Filters.term("color", "red") ).build(); System.out.println(JsonSerializer.serialize(request)); SenseiResult res = senseiServiceProxy.sendRequest( request); assertEquals("numhits is wrong", 2160, res.getNumhits().intValue()); } @Test public void testTermsFilter() throws Exception { SenseiClientRequest request = SenseiClientRequest.builder().filter( Filters.terms("tags", Arrays.asList("leather", "moon-roof"), Arrays.asList("hybrid"),Operator.or) ).build(); System.out.println(JsonSerializer.serialize(request)); SenseiResult res = senseiServiceProxy.sendRequest( request); assertEquals("numhits is wrong", 5777, res.getNumhits().intValue()); } @Test public void testRangeFilter() throws Exception { SenseiClientRequest request = SenseiClientRequest.builder().filter( Filters.range("year", "1999", "2000") ).explain(true).build(); System.out.println(JsonSerializer.serialize(request)); SenseiResult res = senseiServiceProxy.sendRequest( request); //System.out.println(res); assertEquals("numhits is wrong", 3015, res.getNumhits().intValue()); } /* public void testRangeFilter() throws Exception { logger.info("executing test case testRangeFilter"); String req = "{\"filter\":{\"range\":{\"year\":{\"to\":2000,\"boost\":2,\"from\":1999,\"_noOptimize\":false}}}}"; JSONObject res = search(new JSONObject(req)); assertEquals("numhits is wrong", 3015, res.getInt("numhits")); } public void testRangeFilter2() throws Exception { logger.info("executing test case testRangeFilter2"); String req = 
"{\"filter\":{\"range\":{\"year\":{\"to\":\"2000\",\"boost\":2,\"from\":\"1999\",\"_noOptimize\":true,\"_type\":\"int\"}}}}"; JSONObject res = search(new JSONObject(req)); assertEquals("numhits is wrong", 3015, res.getInt("numhits")); }*/ /* Need to fix the bug in bobo and kamikazi, for details see the following two test cases:*/ /* another weird bug may exist somewhere in bobo or kamikazi.*/ /* In the following two test cases, when modifying the first one by changing "tags" to "tag", it is supposed that * Only the first test case is not correct, but the second one also throw one NPE, which is weird. * */ // public void testAndFilter2() throws Exception // logger.info("executing test case testAndFilter2"); // String req = "{\"filter\":{\"and\":[{\"term\":{\"tags\":\"mp3\",\"_noOptimize\":false}},{\"query\":{\"term\":{\"color\":\"red\"}}}]}}"; // JSONObject res = search(new JSONObject(req)); // assertEquals("numhits is wrong", 439, res.getInt("numhits")); // public void testOrFilter4() throws Exception // //color:blue ==> 1104 // //color:red ==> 2160 // logger.info("executing test case testOrFilter4"); // String req = "{\"filter\":{\"or\":[{\"term\":{\"color\":\"blue\",\"_noOptimize\":false}},{\"query\":{\"term\":{\"color\":\"red\"}}}]}}"; // JSONObject res = search(new JSONObject(req)); // assertEquals("numhits is wrong", 3264, res.getInt("numhits")); }
package eu.cloudopting.provision.digitalocean;

import java.lang.reflect.Field;
import java.security.KeyFactory;
import java.security.KeyPair;
import java.security.KeyPairGenerator;
import java.security.NoSuchAlgorithmException;
import java.security.NoSuchProviderException;
import java.security.PrivateKey;
import java.security.PublicKey;
import java.util.Comparator;

import org.bouncycastle.asn1.eac.RSAPublicKey;
import org.bouncycastle.asn1.pkcs.RSAPrivateKey;
import org.jclouds.ContextBuilder;
import org.jclouds.digitalocean2.DigitalOcean2Api;
import org.jclouds.digitalocean2.domain.Action;
import org.jclouds.digitalocean2.domain.Droplet;
import org.jclouds.digitalocean2.domain.DropletCreate;
import org.jclouds.digitalocean2.domain.Image;
import org.jclouds.digitalocean2.domain.Key;
import org.jclouds.digitalocean2.domain.Region;
import org.jclouds.digitalocean2.domain.Size;
import org.jclouds.digitalocean2.domain.options.CreateDropletOptions;
import org.jclouds.digitalocean2.domain.options.ImageListOptions;
import org.json.JSONException;
import org.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;

import com.google.common.base.Optional;
import com.google.common.base.Predicate;
import com.google.common.collect.FluentIterable;
import com.google.common.primitives.Floats;

import eu.cloudopting.cloud.CloudProvider;
import eu.cloudopting.provision.AbstractProvision;

@Service
public class DigitaloceanProvision extends AbstractProvision<DigitaloceanResult, DigitaloceanRequest> {

    private final Logger log = LoggerFactory.getLogger(DigitaloceanProvision.class);

    private static final String OS_TYPE = "CentOS";

    private enum OS_VERSION {
        V7("7"), V6("6"), V5("5");

        private String name;

        private OS_VERSION(String name) {
            this.name = name;
        }

        public String getName() {
            return name;
        }
    }

    @Override
    public DigitaloceanResult provision(DigitaloceanRequest request) {
        // TODO Auto-generated method stub
        return null;
    }

    @Override
    public String provisionVM(DigitaloceanRequest request) {
        log.debug("in DO ProvisionVM");
        DigitalOcean2Api api = getClient(request);
        Size machineType = getMachineType(api);
        log.debug("Size: " + machineType.toString());
        Region region = getRegion(api, machineType);
        log.debug("Region null? " + (region == null));
        log.debug("Region: " + region);
        Image image = getImage(api, region);
        log.debug("Image: " + image.name());
        CreateDropletOptions digitalOceanSpecificParams =
                CreateDropletOptions.builder().backupsEnabled(false).privateNetworking(false).build();
        //TODO create public/private key
//        try {
//            KeyPairGenerator generator;
//            generator = KeyPairGenerator.getInstance("RSA", "BC");
//            generator.initialize(1024);
//            KeyPair keyPair = generator.generateKeyPair();
//            RSAPrivateKey priv = (RSAPrivateKey) keyPair.getPrivate();
//            RSAPublicKey pub = (RSAPublicKey) keyPair.getPublic();
//        } catch (NoSuchAlgorithmException e1) {
//            // TODO Auto-generated catch block
//            e1.printStackTrace();
//        } catch (NoSuchProviderException e1) {
//            // TODO Auto-generated catch block
//            e1.printStackTrace();
//
        //Key key = Key.create("id", "name", "fingerprint", publicKey);
        //TODO: for Luca Gioppo: this is where the userData field is set on the object that represents the DigitalOcean droplet options
        // CreateDropletOptions builder does not have a method for setting userData field.
// We set userData field by reflection try { Field userData = CreateDropletOptions.class.getDeclaredField("userData"); userData.setAccessible(true); userData.set(digitalOceanSpecificParams, request.getUserData()); } catch (Exception e) { e.printStackTrace(); throw new RuntimeException("Error setting user data info at digitalocean provision"); } log.debug("Before calling create"); DropletCreate result = api.dropletApi().create("testOcean", region.slug(), machineType.slug(), image.slug(), digitalOceanSpecificParams); log.debug("After calling create"); return String.valueOf(result.droplet().id()); } // Method returns the cheapest machine by hourly price private Size getMachineType(DigitalOcean2Api api){ Size size = api.sizeApi().list().concat().toSortedList(new Comparator<Size>() { @Override public int compare(Size machine1, Size machine2) { if(!machine1.available()){ return 1; } if(!machine2.available()){ return -1; } return Floats.compare(machine1.priceHourly(), machine2.priceHourly()); } }).get(0); return size; } // Method returns first available region for starting a vm of type machineType private Region getRegion(DigitalOcean2Api api, Size machineType){ Optional<? extends Region> region = api.regionApi().list().concat().firstMatch(isRegionOk(machineType)); return region.orNull(); } private static Predicate<Region> isRegionOk(Size machineType) { return new Predicate<Region>() { @Override public boolean apply(Region region) { return region.available() && machineType.regions().contains(region.slug()); } }; } private Image getImage(DigitalOcean2Api api, Region region){ FluentIterable<Image> images = api.imageApi(). list(ImageListOptions.Builder.type("distribution")). filter(new Predicate<Image>() { @Override public boolean apply(Image image) { return image.regions().contains(region.slug()) && image.distribution().equals(OS_TYPE); } }); for(OS_VERSION osVersion : OS_VERSION.values()){ Optional<? 
extends Image> image = images.firstMatch(isVersionOk(osVersion)); if(image.isPresent()){ return image.get(); } } throw new RuntimeException("Cannot find image for OS " + OS_TYPE + " in region " + region.name()); } private static Predicate<Image> isVersionOk(OS_VERSION osVersion) { return new Predicate<Image>() { @Override public boolean apply(Image image) { return image.name().startsWith(osVersion.getName()); } }; } @Override public boolean checkVMdeployed(DigitaloceanRequest request, String taskId) { DigitalOcean2Api api = getClient(request); Droplet droplet = api.dropletApi().get(Integer.valueOf(taskId)); if(droplet != null && droplet.status() == Droplet.Status.ACTIVE){ return true; } return false; } private DigitalOcean2Api getClient(DigitaloceanRequest request) { ContextBuilder builder = ContextBuilder.newBuilder(CloudProvider.DIGITALOCEAN.getJcloudsName()) .credentials(request.getIdentity(), request.getCredential()); //.endpoint(request.getEndpoint()); return builder.buildApi(DigitalOcean2Api.class); } @Override public JSONObject getVMinfo(DigitaloceanRequest request, String taskId) { DigitalOcean2Api api = getClient(request); Droplet droplet = api.dropletApi().get(Integer.valueOf(taskId)); if(droplet == null){ return null; } JSONObject vmData = new JSONObject(); try { vmData.put("vmId", droplet.id()); } catch (JSONException e) { e.printStackTrace(); } return vmData; } @Override public String acquireIp(DigitaloceanRequest myRequest) { // TODO Auto-generated method stub return null; } @Override public boolean checkIpAcquired(DigitaloceanRequest myRequest, String taskId) { // TODO Auto-generated method stub return false; } @Override public JSONObject getAcquiredIpinfo(DigitaloceanRequest request, String vmId) { DigitalOcean2Api api = getClient(request); Droplet droplet = api.dropletApi().get(Integer.valueOf(vmId)); if(droplet == null){ return null; } JSONObject ipData = new JSONObject(); try { ipData.put("ip", droplet.getPublicAddresses().iterator().next().ip()); ipData.put("ipId", ""); } catch (JSONException e) { // TODO Auto-generated catch block e.printStackTrace(); } return ipData; } @Override public String portForward(DigitaloceanRequest myRequest) { // TODO Auto-generated method stub return null; } @Override public boolean checkPortForward(DigitaloceanRequest myRequest, String taskId) { // TODO Auto-generated method stub return false; } @Override public JSONObject getVMinfoById(DigitaloceanRequest request) { DigitalOcean2Api api = getClient(request); Droplet droplet = api.dropletApi().get(Integer.valueOf(request.getVirtualMachineId())); System.out.println("VM:" + droplet.toString()); JSONObject vmData = new JSONObject(); try { // vmData.put("vmId", vm.getId()); vmData.put("vmId", droplet.id()); vmData.put("created", droplet.createdAt()); vmData.put("state", droplet.status()); System.out.println("VM STATE:" + droplet.status()); } catch (JSONException e) { e.printStackTrace(); } return vmData; } @Override public String removeISO(DigitaloceanRequest myRequest) { return ""; } @Override public boolean checkIso(DigitaloceanRequest myRequest, String taskId) { return true; } @Override public String startVM(DigitaloceanRequest request) { DigitalOcean2Api api = getClient(request); Action powerOnaction = api.dropletApi().powerOn(Integer.valueOf(request.getVirtualMachineId())); return String.valueOf(powerOnaction.id()); } }
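/*
 * A sketch of how the methods above chain together for a single droplet:
 * provisionVM() returns the droplet id, checkVMdeployed() is polled until the
 * droplet reports Status.ACTIVE, and getAcquiredIpinfo() then exposes the
 * public IP.  The DigitaloceanRequest no-arg constructor and setters used
 * below are assumptions (only the getters appear in the class above), the
 * sleep interval and retry count are arbitrary, and in the application the
 * provisioner would normally be injected as a Spring @Service rather than
 * constructed directly.
 */
import org.json.JSONObject;

import eu.cloudopting.provision.digitalocean.DigitaloceanProvision;
import eu.cloudopting.provision.digitalocean.DigitaloceanRequest;

public class DigitaloceanProvisionSketch {
    public static void main(String[] args) throws Exception {
        DigitaloceanProvision provision = new DigitaloceanProvision();

        DigitaloceanRequest request = new DigitaloceanRequest(); // assumed constructor
        request.setIdentity("my-digitalocean-identity");         // assumed setter
        request.setCredential("my-digitalocean-token");          // assumed setter
        request.setUserData("#cloud-config\n");                  // assumed setter

        String dropletId = provision.provisionVM(request);

        // poll until the droplet is ACTIVE (interval and retry count are arbitrary)
        for (int i = 0; i < 60 && !provision.checkVMdeployed(request, dropletId); i++) {
            Thread.sleep(10_000L);
        }

        JSONObject ipInfo = provision.getAcquiredIpinfo(request, dropletId);
        System.out.println(ipInfo);
    }
}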
package io.enmasse.barnabas.controller.cluster; import io.enmasse.barnabas.controller.cluster.resources.KafkaResource; import io.enmasse.barnabas.controller.cluster.resources.ZookeeperResource; import io.fabric8.kubernetes.api.model.*; import io.fabric8.kubernetes.api.model.extensions.StatefulSet; import io.fabric8.kubernetes.client.DefaultKubernetesClient; import io.fabric8.kubernetes.client.KubernetesClient; import io.fabric8.kubernetes.client.KubernetesClientException; import io.fabric8.kubernetes.client.Watcher; import io.vertx.core.AbstractVerticle; import io.vertx.core.Future; import io.vertx.core.WorkerExecutor; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.stream.Collectors; public class ClusterController extends AbstractVerticle { private static final Logger log = LoggerFactory.getLogger(ClusterController.class.getName()); private final KubernetesClient kubernetesClient; private final Map<String, String> labels; private final String namespace; private WorkerExecutor executor; public ClusterController(ClusterControlerConfig config) throws Exception { log.info("Creating ClusterController"); this.namespace = config.getNamespace(); this.labels = config.getLabels(); this.kubernetesClient = new DefaultKubernetesClient(); } @Override public void start(Future<Void> start) { log.info("Starting ClusterController"); this.executor = getVertx().createSharedWorkerExecutor("kubernetes-ops-pool", 5, 60000000000l); // time is in ns! getVertx().executeBlocking( future -> { kubernetesClient.configMaps().inNamespace(namespace).withLabels(labels).watch(new Watcher<ConfigMap>() { @Override public void eventReceived(Action action, ConfigMap cm) { switch (action) { case ADDED: log.info("New ConfigMap {}", cm.getMetadata().getName()); reconcile(); break; case DELETED: log.info("Deleted ConfigMap {}", cm.getMetadata().getName()); reconcile(); break; case MODIFIED: log.info("Modified ConfigMap {}", cm.getMetadata().getName()); reconcile(); break; case ERROR: log.info("Failed ConfigMap {}", cm.getMetadata().getName()); reconcile(); break; default: log.info("Unknown action: {}", cm.getMetadata().getName()); reconcile(); } } @Override public void onClose(KubernetesClientException e) { log.info("Watcher closed", e); } }); future.complete(); }, res -> { if (res.succeeded()) { log.info("ClusterController up and running"); start.complete(); } else { log.info("ClusterController startup failed"); start.fail("ClusterController startup failed"); } } ); } private void reconcile() { log.info("Reconciling ..."); List<ConfigMap> cms = kubernetesClient.configMaps().inNamespace(namespace).withLabels(labels).list().getItems(); List<StatefulSet> sss = kubernetesClient.apps().statefulSets().inNamespace(namespace).withLabels(labels).list().getItems(); List<String> cmsNames = cms.stream().map(cm -> cm.getMetadata().getName()).collect(Collectors.toList()); List<String> sssNames = sss.stream().map(cm -> cm.getMetadata().getName()).collect(Collectors.toList()); List<ConfigMap> addList = cms.stream().filter(cm -> !sssNames.contains(cm.getMetadata().getName())).collect(Collectors.toList()); List<ConfigMap> updateList = cms.stream().filter(cm -> sssNames.contains(cm.getMetadata().getName())).collect(Collectors.toList()); List<StatefulSet> deletionList = sss.stream().filter(ss -> !cmsNames.contains(ss.getMetadata().getName())).collect(Collectors.toList()); addClusters(addList); deleteClusters(deletionList); updateClusters(updateList); } 
private void addClusters(List<ConfigMap> add) { for (ConfigMap cm : add) { log.info("Cluster {} should be added", cm.getMetadata().getName()); addCluster(cm); } } private void addCluster(ConfigMap add) { executor.executeBlocking( future -> { log.info("Adding cluster {}", add.getMetadata().getName()); ZookeeperResource.fromConfigMap(add, kubernetesClient).create(); KafkaResource.fromConfigMap(add, kubernetesClient).create(); future.complete(); }, false, res -> { if (res.succeeded()) { log.info("Cluster added {}", add.getMetadata().getName()); } else { log.error("Failed to add cluster {}", add.getMetadata().getName()); } }); } private void updateClusters(List<ConfigMap> update) { for (ConfigMap cm : update) { log.info("Cluster {} should be checked for updates -> NOT IMPLEMENTED YET", cm.getMetadata().getName()); // No configuration => nothing to update } } private void deleteClusters(List<StatefulSet> delete) { for (StatefulSet ss : delete) { log.info("Cluster {} should be deleted", ss.getMetadata().getName()); deleteCluster(ss); } } private void deleteCluster(StatefulSet ss) { executor.executeBlocking( future -> { log.info("Deleting cluster {}", ss.getMetadata().getName()); KafkaResource.fromStatefulSet(ss, kubernetesClient).delete(); ZookeeperResource.fromStatefulSet(ss, kubernetesClient).delete(); future.complete(); }, false, res -> { if (res.succeeded()) { log.info("Cluster deleted {}", ss.getMetadata().getName()); } else { log.error("Failed to delete cluster {}", ss.getMetadata().getName()); } }); } }
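/*
 * The reconcile() logic above reduces to a diff of two name sets: ConfigMaps
 * with no matching StatefulSet are additions, names present on both sides are
 * update candidates, and StatefulSets with no matching ConfigMap are deletions.
 * The sketch below reproduces just that diff on plain strings, so the three
 * lists can be seen in isolation from the Kubernetes client; the sample names
 * are made up.
 */
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

public class ReconcileDiffSketch {
    public static void main(String[] args) {
        List<String> configMapNames = Arrays.asList("cluster-a", "cluster-b", "cluster-c");
        List<String> statefulSetNames = Arrays.asList("cluster-b", "cluster-c", "cluster-d");

        List<String> toAdd = configMapNames.stream()
                .filter(name -> !statefulSetNames.contains(name))
                .collect(Collectors.toList());
        List<String> toUpdate = configMapNames.stream()
                .filter(statefulSetNames::contains)
                .collect(Collectors.toList());
        List<String> toDelete = statefulSetNames.stream()
                .filter(name -> !configMapNames.contains(name))
                .collect(Collectors.toList());

        System.out.println("add:    " + toAdd);     // [cluster-a]
        System.out.println("update: " + toUpdate);  // [cluster-b, cluster-c]
        System.out.println("delete: " + toDelete);  // [cluster-d]
    }
}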
package org.intellij.sdk.view.pane; import com.intellij.icons.AllIcons; import com.intellij.ide.projectView.PresentationData; import com.intellij.ide.projectView.ProjectView; import com.intellij.ide.util.treeView.AbstractTreeNode; import com.intellij.openapi.fileEditor.FileEditorManager; import com.intellij.openapi.project.Project; import com.intellij.openapi.project.ProjectUtil; import com.intellij.openapi.util.Disposer; import com.intellij.openapi.util.Key; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.vfs.LocalFileSystem; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.openapi.vfs.VirtualFileEvent; import com.intellij.openapi.vfs.VirtualFileListener; import com.intellij.psi.search.FilenameIndex; import com.intellij.util.Alarm; import org.jetbrains.annotations.NotNull; import javax.swing.*; import java.util.*; public class ImagesProjectNode extends AbstractTreeNode<VirtualFile> { private static final Key<Set<VirtualFile>> IMAGES_PROJECT_DIRS = Key.create("images.files.or.directories"); public ImagesProjectNode(final Project project) { super(project, ProjectUtil.guessProjectDir(project)); scanImages(project); subscribeToVFS(project); } public ImagesProjectNode(Project project, VirtualFile file) { super(project, file); } private void scanImages(Project project) { addAllByExt(project, "png"); addAllByExt(project, "jpg"); addAllByExt(project, "svg"); } private void addAllByExt(Project project, String ext) { final Set<VirtualFile> imagesFiles = getImagesFiles(project); final VirtualFile projectDir = ProjectUtil.guessProjectDir(project); for (VirtualFile file : FilenameIndex.getAllFilesByExt(project, ext)) { while (file != null && !file.equals(projectDir)) { imagesFiles.add(file); file = file.getParent(); } } } @NotNull private Set<VirtualFile> getImagesFiles(Project project) { Set<VirtualFile> files = project.getUserData(IMAGES_PROJECT_DIRS); if (files == null) { files = new HashSet<>(); project.putUserData(IMAGES_PROJECT_DIRS, files); } return files; } @Override protected VirtualFile getVirtualFile() { return getValue(); } @NotNull @Override public Collection<? extends AbstractTreeNode<?>> getChildren() { final List<VirtualFile> files = new ArrayList<>(0); for (VirtualFile file : getValue().getChildren()) { if (getImagesFiles(myProject).contains(file)) { files.add(file); } } if (files.isEmpty()) return Collections.emptyList(); final List<AbstractTreeNode<?>> nodes = new ArrayList<>(files.size()); final boolean alwaysOnTop = ProjectView.getInstance(myProject).isFoldersAlwaysOnTop(""); files.sort((o1, o2) -> { if (alwaysOnTop) { final boolean d1 = o1.isDirectory(); final boolean d2 = o2.isDirectory(); if (d1 && !d2) return -1; if (!d1 && d2) return 1; } return StringUtil.naturalCompare(o1.getName(), o2.getName()); }); for (VirtualFile file : files) { nodes.add(new ImagesProjectNode(myProject, file)); } return nodes; } @Override protected void update(PresentationData data) { data.setIcon(getValue().isDirectory() ? 
AllIcons.Nodes.Folder : getValue().getFileType().getIcon()); data.setPresentableText(getValue().getName()); } @Override public boolean canNavigate() { return !getValue().isDirectory(); } @Override public boolean canNavigateToSource() { return canNavigate(); } @Override public void navigate(boolean requestFocus) { FileEditorManager.getInstance(myProject).openFile(getValue(), false); } private void subscribeToVFS(final Project project) { final Alarm alarm = new Alarm(Alarm.ThreadToUse.POOLED_THREAD, project); LocalFileSystem.getInstance().addVirtualFileListener(new VirtualFileListener() { { final VirtualFileListener me = this; Disposer.register(project, () -> LocalFileSystem.getInstance().removeVirtualFileListener(me)); } @Override public void fileCreated(@NotNull VirtualFileEvent event) { handle(event); } @Override public void fileDeleted(@NotNull VirtualFileEvent event) { handle(event); } void handle(VirtualFileEvent event) { final String filename = event.getFileName().toLowerCase(); if (filename.endsWith(".png") || filename.endsWith(".jpg")) { alarm.cancelAllRequests(); alarm.addRequest(() -> { getImagesFiles(project).clear(); scanImages(project); SwingUtilities.invokeLater(() -> ProjectView.getInstance(myProject) .getProjectViewPaneById(ImagesProjectViewPane.ID) .updateFromRoot(true)); }, 1000); } } }); } }
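/*
 * addAllByExt() above records not just each image file but every ancestor
 * directory up to (but not including) the project root, so that getChildren()
 * can later show a directory only when it transitively contains an image.
 * The sketch below repeats that ancestor walk with plain java.nio paths; the
 * sample paths are made up.
 */
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.HashSet;
import java.util.Set;

public class ImageAncestorSketch {
    public static void main(String[] args) {
        Path projectDir = Paths.get("/project");
        Set<Path> imagesFilesOrDirs = new HashSet<>();

        Path imageFile = Paths.get("/project/src/resources/icons/logo.png");

        // walk up from the file until the project root, recording every level
        Path current = imageFile;
        while (current != null && !current.equals(projectDir)) {
            imagesFilesOrDirs.add(current);
            current = current.getParent();
        }

        // contains logo.png plus src, src/resources and src/resources/icons
        System.out.println(imagesFilesOrDirs);
    }
}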
package edu.duke.cabig.c3pr.dao;

import java.util.List;

import org.springframework.transaction.annotation.Transactional;

import edu.duke.cabig.c3pr.domain.PlannedNotification;
import edu.duke.cabig.c3pr.domain.RecipientScheduledNotification;

/**
 * Hibernate implementation of RecipientScheduledNotificationDao
 *
 * @see edu.duke.cabig.c3pr.dao.RecipientScheduledNotificationDao
 * @author Priyatam
 */
public class RecipientScheduledNotificationDao extends GridIdentifiableDao<RecipientScheduledNotification> {

    @Override
    public Class<RecipientScheduledNotification> domainClass() {
        return RecipientScheduledNotification.class;
    }

    /*
     * Returns all RecipientScheduledNotification objects (non-Javadoc)
     *
     * @see edu.duke.cabig.c3pr.dao.RecipientScheduledNotificationDao#getAll()
     */
    // For Oracle test-build, the readOnly has been changed from true to false;
    @Transactional(readOnly=false)
    public List<RecipientScheduledNotification> getAll() {
        return getHibernateTemplate().find("from RecipientScheduledNotification");
    }

    // readOnly was changed from true to false to get notifications to work on Oracle
    @Transactional(readOnly=false)
    public RecipientScheduledNotification getInitializedRecipientScheduledNotificationById(int id) {
        RecipientScheduledNotification recipientScheduledNotification = getById(id);
        return recipientScheduledNotification;
    }

    @Transactional(readOnly=false)
    public void saveOrUpdate(RecipientScheduledNotification recipientScheduledNotification) {
        // do not remove the flush...imperative for the notifications flow.
        getHibernateTemplate().saveOrUpdate(recipientScheduledNotification);
        getHibernateTemplate().flush();
    }
}
package edu.duke.cabig.c3pr.service; import static edu.duke.cabig.c3pr.C3PRUseCase.IMPORT_STUDY; import java.io.File; import java.io.InputStream; import java.util.List; import edu.duke.cabig.c3pr.C3PRUseCases; import edu.duke.cabig.c3pr.dao.HealthcareSiteDao; import edu.duke.cabig.c3pr.dao.StudyDao; import edu.duke.cabig.c3pr.domain.CompanionStudyAssociation; import edu.duke.cabig.c3pr.domain.HealthcareSite; import edu.duke.cabig.c3pr.domain.Study; import edu.duke.cabig.c3pr.domain.StudyOrganization; import edu.duke.cabig.c3pr.service.impl.StudyXMLImporterServiceImpl; import edu.duke.cabig.c3pr.utils.MasqueradingDaoTestCase; import edu.duke.cabig.c3pr.utils.StringUtils; import edu.duke.cabig.c3pr.xml.XMLParser; import edu.duke.cabig.c3pr.xml.XmlMarshaller; @C3PRUseCases( { IMPORT_STUDY }) public class StudyXMLImporterTestCase extends MasqueradingDaoTestCase<StudyDao> { private StudyXMLImporterServiceImpl studyImporter; private HealthcareSiteDao healthcareSitedao; public XMLParser xmlParser; XmlMarshaller marshaller; protected void setUp() throws Exception { super.setUp(); // To change body of overridden methods use File | // Settings | File // Templates. healthcareSitedao = (HealthcareSiteDao) getApplicationContext() .getBean("healthcareSiteDao"); marshaller = new XmlMarshaller("c3pr-study-xml-castor-mapping.xml"); studyImporter = (StudyXMLImporterServiceImpl) getApplicationContext() .getBean("studyXMLImporterService"); xmlParser = (XMLParser)getApplicationContext().getBean("xmlParser"); } public void testStudyValidation() throws Exception { Study study = getDao().getById(1000); try { InputStream is = getClass().getClassLoader() .getResourceAsStream("c3pr-sample-study.xml"); String str = is.toString(); byte[] b = str.getBytes(); // xmlParser.validate(b); } catch (Exception e) { e.printStackTrace(); fail("Unable to Validate"); } } public void testGetStudies() throws Exception { for (int i = 1000; i < 1003; i++) { Study study = getDao().getById(i); getDao().initialize(study); interruptSession(); // have to set the coordinating center identifier to something // differnt to prevent duplicate study exception. // The studies in daoTest.xml have already been inserted into // database. 
HealthcareSite healthcareSite = healthcareSitedao.getById(i); study.getCoordinatingCenterAssignedIdentifier().setHealthcareSite( healthcareSite); study.getCoordinatingCenterAssignedIdentifier().setValue("abc" + i); if (study.getFundingSponsorAssignedIdentifier() != null) study.getFundingSponsorAssignedIdentifier().setValue("abc" + i); for (CompanionStudyAssociation companionStudyAssociation : study .getCompanionStudyAssociations()) { Study companionStudy = companionStudyAssociation .getCompanionStudy(); companionStudy.getCoordinatingCenterAssignedIdentifier() .setValue("pqr" + i); if (companionStudy.getFundingSponsorAssignedIdentifier() != null) companionStudy.getFundingSponsorAssignedIdentifier() .setValue("pqr" + i); } String[] xmlStudy = (marshaller.toXML(study)).split(">", 2); String studyXml = xmlStudy[0] + "><studies>" + xmlStudy[1] + "</studies> "; System.out.println(studyXml); File outputXMLFile = new File("dummyOutput.xml"); List<Study> studies = studyImporter.importStudies(StringUtils .getInputStream(studyXml), outputXMLFile); assertNotNull(studies); // studies.get(0).getStudyDiseases().get(0); assertTrue(studies.size() > 0); for (Study loadedStudy : studies) { assertNotNull(loadedStudy); assertEquals(loadedStudy.getStudyOrganizations().size(), study .getStudyOrganizations().size()); for (StudyOrganization organization : loadedStudy .getStudyOrganizations()) { assertNotNull(organization.getHealthcareSite()); } } } } /** * What dao class is the test trying to Masquerade * * @return */ public Class<StudyDao> getMasqueradingDaoClassName() { return StudyDao.class; } public StudyXMLImporterServiceImpl getStudyImporter() { return studyImporter; } public void setStudyImporter(StudyXMLImporterServiceImpl studyImporter) { this.studyImporter = studyImporter; } public XmlMarshaller getMarshaller() { return marshaller; } public void setMarshaller(XmlMarshaller marshaller) { this.marshaller = marshaller; } }
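/*
 * testGetStudies() above wraps a single marshalled study document in a
 * <studies> element by splitting the XML once on the first '>', presumably so
 * the XML declaration emitted by the marshaller stays outside the wrapper.
 * The toy string below shows the effect of that split-and-rejoin on its own;
 * the element names are made up.
 */
public class StudiesWrapperSketch {
    public static void main(String[] args) {
        String studyXml = "<?xml version=\"1.0\"?><study><shortTitleText>demo</shortTitleText></study>";

        // same technique as the test: split once on the first '>'
        String[] parts = studyXml.split(">", 2);
        String wrapped = parts[0] + "><studies>" + parts[1] + "</studies> ";

        // <?xml version="1.0"?><studies><study>...</study></studies>
        System.out.println(wrapped);
    }
}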
package edu.duke.cabig.c3pr.web.admin; import java.util.Date; import java.util.List; import java.util.Map; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.apache.log4j.Logger; import org.springframework.validation.BindException; import org.springframework.web.servlet.ModelAndView; import edu.duke.cabig.c3pr.dao.C3PRBaseDao; import edu.duke.cabig.c3pr.dao.InvestigatorDao; import edu.duke.cabig.c3pr.domain.ContactMechanism; import edu.duke.cabig.c3pr.domain.ContactMechanismType; import edu.duke.cabig.c3pr.domain.HealthcareSiteInvestigator; import edu.duke.cabig.c3pr.domain.Investigator; import edu.duke.cabig.c3pr.domain.LocalInvestigator; import edu.duke.cabig.c3pr.domain.RemoteInvestigator; import edu.duke.cabig.c3pr.domain.SiteInvestigatorGroupAffiliation; import edu.duke.cabig.c3pr.domain.StudyInvestigator; import edu.duke.cabig.c3pr.exception.C3PRBaseException; import edu.duke.cabig.c3pr.exception.C3PRBaseRuntimeException; import edu.duke.cabig.c3pr.service.PersonnelService; import edu.duke.cabig.c3pr.utils.StringUtils; /** * @author Ramakrishna * @author kherm */ public class CreateInvestigatorController<C extends Investigator> extends AbstractCreateC3PRUserController<C, C3PRBaseDao<C>> { private PersonnelService personnelService; private InvestigatorDao investigatorDao; private String EDIT_FLOW = "EDIT_FLOW"; private String SAVE_FLOW = "SAVE_FLOW"; private String FLOW = "FLOW"; private Logger log = Logger.getLogger(CreateInvestigatorController.class); public CreateInvestigatorController() { } /** * Create a nested object graph that Create Investigator Design needs Incase the flow is coming * from search...we get the id and get the corresponding investigator obj. 
* * @param request - * HttpServletRequest * @throws ServletException */ protected Object formBackingObject(HttpServletRequest request) throws ServletException { Investigator inv; String email = request.getParameter("emailId") ; if (!StringUtils.isBlank(email)) { log.info(" Request URl is:" + request.getRequestURL().toString()); List<Investigator> investigators = investigatorDao.getByEmailAddress(email); inv = investigators.get(0); int cmSize = inv.getContactMechanisms().size(); if (cmSize == 0) { addContacts(inv); } if (cmSize == 1) { ContactMechanism contactMechanismPhone = new ContactMechanism(); ContactMechanism contactMechanismFax = new ContactMechanism(); contactMechanismPhone.setType(ContactMechanismType.PHONE); contactMechanismFax.setType(ContactMechanismType.Fax); inv.addContactMechanism(contactMechanismPhone); inv.addContactMechanism(contactMechanismFax); } if (cmSize == 2) { ContactMechanism contactMechanismFax = new ContactMechanism(); contactMechanismFax.setType(ContactMechanismType.Fax); inv.addContactMechanism(contactMechanismFax); } request.getSession().setAttribute(FLOW, EDIT_FLOW); log.info(" Investigator's ID is:" + inv.getId()); } else { inv = createInvestigatorWithDesign(); request.getSession().setAttribute(FLOW, SAVE_FLOW); } return inv; } @Override protected boolean shouldSave(HttpServletRequest request, Investigator command) { return true; } @Override protected void onBindAndValidate(HttpServletRequest request, Object command, BindException errors) throws Exception { super.onBindAndValidate(request, command, errors); Investigator investigator = (Investigator) command; if(!"saveRemoteInvestigator".equals(request.getParameter("_action")) || (request.getParameter("_action").equals("syncInvestigator") && request.getSession().getAttribute(FLOW).equals(EDIT_FLOW))){ if (! 
request.getParameter("_action").equals("syncInvestigator")) { List<Investigator> invFromDB = investigatorDao .getByEmailAddressFromLocal(investigator .getEmailAsString()); if (invFromDB != null && invFromDB.size() > 0) { return; } } List<Investigator> remoteInvestigators = investigatorDao.getRemoteInvestigators(investigator); boolean matchingExternalInvestigatorPresent = false; for(Investigator remoteInv : remoteInvestigators){ if(remoteInv.getEmailAsString().equals(investigator.getEmailAsString())){ investigator.addExternalInvestigator(remoteInv); matchingExternalInvestigatorPresent = true; } } if(matchingExternalInvestigatorPresent){ errors.reject("REMOTE_INV_EXISTS","Investigator with email " +investigator.getEmailAsString()+ " exisits in external system"); } } } @Override protected ModelAndView onSynchronousSubmit(HttpServletRequest request, HttpServletResponse response, Object command, BindException errors) throws Exception { Investigator investigator = (Investigator) command; RemoteInvestigator remoteInvSelected = null; boolean saveExternalInvestigator = false; try { if (request.getSession().getAttribute(FLOW).equals(SAVE_FLOW)) { if("saveRemoteInvestigator".equals(request.getParameter("_action"))){ saveExternalInvestigator = true; remoteInvSelected = (RemoteInvestigator) investigator.getExternalInvestigators().get(Integer.parseInt(request.getParameter("_selected"))); investigatorDao.buildAndSaveNewRemoteInvestigator(remoteInvSelected); } else{ personnelService.save(investigator); } } else if ("saveRemoteInvestigator".equals(request.getParameter("_action"))) { investigatorDao.evict(investigator); if(investigator.getExternalInvestigators()!=null && investigator.getExternalInvestigators().size()>0){ saveExternalInvestigator = true; remoteInvSelected = (RemoteInvestigator) investigator .getExternalInvestigators().get( Integer.parseInt(request .getParameter("_selected"))); personnelService.convertLocalInvestigatorToRemoteInvestigator((LocalInvestigator)investigator, remoteInvSelected); } } else { for (HealthcareSiteInvestigator hcsInv : investigator.getHealthcareSiteInvestigators()) { if (hcsInv.getStatusCode() != null && !hcsInv.getStatusCode().equals("AC")) { for (SiteInvestigatorGroupAffiliation sInvGrAff : hcsInv .getSiteInvestigatorGroupAffiliations()) { sInvGrAff.setEndDate(new Date()); } for (StudyInvestigator studyInv : hcsInv.getStudyInvestigators()) { studyInv.setEndDate(new Date()); } } } investigator = personnelService.merge(investigator); } } catch (C3PRBaseException e) { e.printStackTrace(); // To change body of catch statement use File | Settings | File // Templates. } catch (C3PRBaseRuntimeException e) { if (e.getRootCause().getMessage().contains("MailException")) { // no problem log.info("Error saving Investigator.Probably failed to send email", e); } } Map map = errors.getModel(); map.put("command", saveExternalInvestigator? 
remoteInvSelected:investigator); String studyflow = request.getParameter("studyflow"); if(!StringUtils.isBlank(studyflow)){ map.put("studyflow", studyflow); } ModelAndView mv = new ModelAndView(getSuccessView(), map); return mv; } private Investigator createInvestigatorWithDesign() { LocalInvestigator investigator = new LocalInvestigator(); HealthcareSiteInvestigator healthcareSiteInvestigator = new HealthcareSiteInvestigator(); investigator.addHealthcareSiteInvestigator(healthcareSiteInvestigator); addContacts(investigator); return investigator; } private void addContacts(Investigator inv) { ContactMechanism contactMechanismEmail = new ContactMechanism(); ContactMechanism contactMechanismPhone = new ContactMechanism(); ContactMechanism contactMechanismFax = new ContactMechanism(); contactMechanismEmail.setType(ContactMechanismType.EMAIL); contactMechanismPhone.setType(ContactMechanismType.PHONE); contactMechanismFax.setType(ContactMechanismType.Fax); inv.addContactMechanism(contactMechanismEmail); inv.addContactMechanism(contactMechanismPhone); inv.addContactMechanism(contactMechanismFax); } /* * (non-Javadoc) * * @see org.springframework.web.servlet.mvc.SimpleFormController#onSubmit (HttpServletRequest * request, HttpServletResponse response, Object command, BindException errors) */ @Override protected boolean suppressValidation(HttpServletRequest request, Object command) { if(command instanceof RemoteInvestigator){ return true; } return ("saveRemoteInvestigator".equals(request.getParameter("_action")) || "syncInvestigator".equals(request.getParameter("_action"))); } public PersonnelService getPersonnelService() { return personnelService; } public void setPersonnelService(PersonnelService personnelService) { this.personnelService = personnelService; } public InvestigatorDao getInvestigatorDao() { return investigatorDao; } public void setInvestigatorDao(InvestigatorDao investigatorDao) { this.investigatorDao = investigatorDao; } @Override protected C3PRBaseDao getDao() { return this.investigatorDao; } @Override protected C getPrimaryDomainObject(C command) { return command; } }
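/*
 * formBackingObject() above pads an investigator's contact mechanisms so the
 * form always has three rows: email, phone and fax.  It keys purely off the
 * current size (0, 1 or 2), which relies on any existing entries already being
 * in email-phone-fax order.  The sketch below shows that size-based padding on
 * a plain list of types; the enum here is illustrative and unrelated to
 * ContactMechanismType.
 */
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class ContactPaddingSketch {
    enum Type { EMAIL, PHONE, FAX }

    static void pad(List<Type> contacts) {
        // same convention as the controller: fill in whatever is missing,
        // based only on how many entries are already present
        if (contacts.size() == 0) {
            contacts.addAll(Arrays.asList(Type.EMAIL, Type.PHONE, Type.FAX));
        } else if (contacts.size() == 1) {
            contacts.addAll(Arrays.asList(Type.PHONE, Type.FAX));
        } else if (contacts.size() == 2) {
            contacts.add(Type.FAX);
        }
    }

    public static void main(String[] args) {
        List<Type> contacts = new ArrayList<>(Arrays.asList(Type.EMAIL));
        pad(contacts);
        System.out.println(contacts); // [EMAIL, PHONE, FAX]
    }
}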
package org.chromium.content.browser.input; import android.os.SystemClock; import android.text.Editable; import android.text.InputType; import android.text.Selection; import android.text.TextUtils; import android.util.Log; import android.view.KeyEvent; import android.view.View; import android.view.inputmethod.BaseInputConnection; import android.view.inputmethod.EditorInfo; import android.view.inputmethod.ExtractedText; import android.view.inputmethod.ExtractedTextRequest; import com.google.common.annotations.VisibleForTesting; public class AdapterInputConnection extends BaseInputConnection { private static final String TAG = "AdapterInputConnection"; private static final boolean DEBUG = false; /** * Selection value should be -1 if not known. See EditorInfo.java for details. */ public static final int INVALID_SELECTION = -1; public static final int INVALID_COMPOSITION = -1; private final View mInternalView; private final ImeAdapter mImeAdapter; private final Editable mEditable; private boolean mSingleLine; private int mNumNestedBatchEdits = 0; private int mLastUpdateSelectionStart = INVALID_SELECTION; private int mLastUpdateSelectionEnd = INVALID_SELECTION; private int mLastUpdateCompositionStart = INVALID_COMPOSITION; private int mLastUpdateCompositionEnd = INVALID_COMPOSITION; @VisibleForTesting AdapterInputConnection(View view, ImeAdapter imeAdapter, Editable editable, EditorInfo outAttrs) { super(view, true); mInternalView = view; mImeAdapter = imeAdapter; mImeAdapter.setInputConnection(this); mEditable = editable; // The editable passed in might have been in use by a prior keyboard and could have had // prior composition spans set. To avoid keyboard conflicts, remove all composing spans // when taking ownership of an existing Editable. removeComposingSpans(mEditable); mSingleLine = true; outAttrs.imeOptions = EditorInfo.IME_FLAG_NO_FULLSCREEN | EditorInfo.IME_FLAG_NO_EXTRACT_UI; outAttrs.inputType = EditorInfo.TYPE_CLASS_TEXT | EditorInfo.TYPE_TEXT_VARIATION_WEB_EDIT_TEXT; if (imeAdapter.getTextInputType() == ImeAdapter.sTextInputTypeText) { // Normal text field outAttrs.inputType |= EditorInfo.TYPE_TEXT_FLAG_AUTO_CORRECT; outAttrs.imeOptions |= EditorInfo.IME_ACTION_GO; } else if (imeAdapter.getTextInputType() == ImeAdapter.sTextInputTypeTextArea || imeAdapter.getTextInputType() == ImeAdapter.sTextInputTypeContentEditable) { // TextArea or contenteditable. 
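// Multi-line editors get sentence capitalization and autocorrect, and IME_ACTION_NONE so the
// soft keyboard's enter key inserts a newline rather than firing an editor action.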
outAttrs.inputType |= EditorInfo.TYPE_TEXT_FLAG_MULTI_LINE | EditorInfo.TYPE_TEXT_FLAG_CAP_SENTENCES | EditorInfo.TYPE_TEXT_FLAG_AUTO_CORRECT; outAttrs.imeOptions |= EditorInfo.IME_ACTION_NONE; mSingleLine = false; } else if (imeAdapter.getTextInputType() == ImeAdapter.sTextInputTypePassword) { // Password outAttrs.inputType = InputType.TYPE_CLASS_TEXT | InputType.TYPE_TEXT_VARIATION_WEB_PASSWORD; outAttrs.imeOptions |= EditorInfo.IME_ACTION_GO; } else if (imeAdapter.getTextInputType() == ImeAdapter.sTextInputTypeSearch) { // Search outAttrs.imeOptions |= EditorInfo.IME_ACTION_SEARCH; } else if (imeAdapter.getTextInputType() == ImeAdapter.sTextInputTypeUrl) { // Url outAttrs.inputType = InputType.TYPE_CLASS_TEXT | InputType.TYPE_TEXT_VARIATION_URI; outAttrs.imeOptions |= EditorInfo.IME_ACTION_GO; } else if (imeAdapter.getTextInputType() == ImeAdapter.sTextInputTypeEmail) { // Email outAttrs.inputType = InputType.TYPE_CLASS_TEXT | InputType.TYPE_TEXT_VARIATION_WEB_EMAIL_ADDRESS; outAttrs.imeOptions |= EditorInfo.IME_ACTION_GO; } else if (imeAdapter.getTextInputType() == ImeAdapter.sTextInputTypeTel) { // Telephone // Number and telephone do not have both a Tab key and an // action in default OSK, so set the action to NEXT outAttrs.inputType = InputType.TYPE_CLASS_PHONE; outAttrs.imeOptions |= EditorInfo.IME_ACTION_NEXT; } else if (imeAdapter.getTextInputType() == ImeAdapter.sTextInputTypeNumber) { // Number outAttrs.inputType = InputType.TYPE_CLASS_NUMBER | InputType.TYPE_NUMBER_VARIATION_NORMAL | InputType.TYPE_NUMBER_FLAG_DECIMAL; outAttrs.imeOptions |= EditorInfo.IME_ACTION_NEXT; } outAttrs.initialSelStart = Selection.getSelectionStart(mEditable); outAttrs.initialSelEnd = Selection.getSelectionEnd(mEditable); mLastUpdateSelectionStart = Selection.getSelectionStart(mEditable); mLastUpdateSelectionEnd = Selection.getSelectionEnd(mEditable); Selection.setSelection(mEditable, outAttrs.initialSelStart, outAttrs.initialSelEnd); updateSelectionIfRequired(); } /** * Updates the AdapterInputConnection's internal representation of the text being edited and * its selection and composition properties. The resulting Editable is accessible through the * getEditable() method. If the text has not changed, this also calls updateSelection on the * InputMethodManager. * * @param text The String contents of the field being edited. * @param selectionStart The character offset of the selection start, or the caret position if * there is no selection. * @param selectionEnd The character offset of the selection end, or the caret position if there * is no selection. * @param compositionStart The character offset of the composition start, or -1 if there is no * composition. * @param compositionEnd The character offset of the composition end, or -1 if there is no * selection. * @param isNonImeChange True when the update was caused by non-IME (e.g. Javascript). */ @VisibleForTesting public void updateState(String text, int selectionStart, int selectionEnd, int compositionStart, int compositionEnd, boolean isNonImeChange) { if (DEBUG) { Log.w(TAG, "updateState [" + text + "] [" + selectionStart + " " + selectionEnd + "] [" + compositionStart + " " + compositionEnd + "] [" + isNonImeChange + "]"); } // If this update is from the IME, no further state modification is necessary because the // state should have been updated already by the IM framework directly. if (!isNonImeChange) return; // Non-breaking spaces can cause the IME to get confused. Replace with normal spaces. 
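// The offsets reported with this update are clamped to the new text length below, so a stale
// or out-of-range selection/composition can never index past the Editable.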
text = text.replace('\u00A0', ' '); selectionStart = Math.min(selectionStart, text.length()); selectionEnd = Math.min(selectionEnd, text.length()); compositionStart = Math.min(compositionStart, text.length()); compositionEnd = Math.min(compositionEnd, text.length()); String prevText = mEditable.toString(); boolean textUnchanged = prevText.equals(text); if (!textUnchanged) { mEditable.replace(0, mEditable.length(), text); } Selection.setSelection(mEditable, selectionStart, selectionEnd); if (compositionStart == compositionEnd) { removeComposingSpans(mEditable); } else { super.setComposingRegion(compositionStart, compositionEnd); } updateSelectionIfRequired(); } /** * @return Editable object which contains the state of current focused editable element. */ @Override public Editable getEditable() { return mEditable; } /** * Sends selection update to the InputMethodManager unless we are currently in a batch edit or * if the exact same selection and composition update was sent already. */ private void updateSelectionIfRequired() { if (mNumNestedBatchEdits != 0) return; int selectionStart = Selection.getSelectionStart(mEditable); int selectionEnd = Selection.getSelectionEnd(mEditable); int compositionStart = getComposingSpanStart(mEditable); int compositionEnd = getComposingSpanEnd(mEditable); // Avoid sending update if we sent an exact update already previously. if (mLastUpdateSelectionStart == selectionStart && mLastUpdateSelectionEnd == selectionEnd && mLastUpdateCompositionStart == compositionStart && mLastUpdateCompositionEnd == compositionEnd) { return; } if (DEBUG) { Log.w(TAG, "updateSelectionIfRequired [" + selectionStart + " " + selectionEnd + "] [" + compositionStart + " " + compositionEnd + "]"); } // updateSelection should be called every time the selection or composition changes // if it happens not within a batch edit, or at the end of each top level batch edit. 
getInputMethodManagerWrapper().updateSelection(mInternalView, selectionStart, selectionEnd, compositionStart, compositionEnd); mLastUpdateSelectionStart = selectionStart; mLastUpdateSelectionEnd = selectionEnd; mLastUpdateCompositionStart = compositionStart; mLastUpdateCompositionEnd = compositionEnd; } /** * @see BaseInputConnection#setComposingText(java.lang.CharSequence, int) */ @Override public boolean setComposingText(CharSequence text, int newCursorPosition) { if (DEBUG) Log.w(TAG, "setComposingText [" + text + "] [" + newCursorPosition + "]"); if (maybePerformEmptyCompositionWorkaround(text)) return true; super.setComposingText(text, newCursorPosition); updateSelectionIfRequired(); return mImeAdapter.checkCompositionQueueAndCallNative(text.toString(), newCursorPosition, false); } /** * @see BaseInputConnection#commitText(java.lang.CharSequence, int) */ @Override public boolean commitText(CharSequence text, int newCursorPosition) { if (DEBUG) Log.w(TAG, "commitText [" + text + "] [" + newCursorPosition + "]"); if (maybePerformEmptyCompositionWorkaround(text)) return true; super.commitText(text, newCursorPosition); updateSelectionIfRequired(); return mImeAdapter.checkCompositionQueueAndCallNative(text.toString(), newCursorPosition, text.length() > 0); } /** * @see BaseInputConnection#performEditorAction(int) */ @Override public boolean performEditorAction(int actionCode) { if (DEBUG) Log.w(TAG, "performEditorAction [" + actionCode + "]"); if (actionCode == EditorInfo.IME_ACTION_NEXT) { restartInput(); // Send TAB key event long timeStampMs = SystemClock.uptimeMillis(); mImeAdapter.sendSyntheticKeyEvent( ImeAdapter.sEventTypeRawKeyDown, timeStampMs, KeyEvent.KEYCODE_TAB, 0); } else { mImeAdapter.sendKeyEventWithKeyCode(KeyEvent.KEYCODE_ENTER, KeyEvent.FLAG_SOFT_KEYBOARD | KeyEvent.FLAG_KEEP_TOUCH_MODE | KeyEvent.FLAG_EDITOR_ACTION); } return true; } /** * @see BaseInputConnection#performContextMenuAction(int) */ @Override public boolean performContextMenuAction(int id) { if (DEBUG) Log.w(TAG, "performContextMenuAction [" + id + "]"); switch (id) { case android.R.id.selectAll: return mImeAdapter.selectAll(); case android.R.id.cut: return mImeAdapter.cut(); case android.R.id.copy: return mImeAdapter.copy(); case android.R.id.paste: return mImeAdapter.paste(); default: return false; } } /** * @see BaseInputConnection#getExtractedText(android.view.inputmethod.ExtractedTextRequest, * int) */ @Override public ExtractedText getExtractedText(ExtractedTextRequest request, int flags) { if (DEBUG) Log.w(TAG, "getExtractedText"); ExtractedText et = new ExtractedText(); et.text = mEditable.toString(); et.partialEndOffset = mEditable.length(); et.selectionStart = Selection.getSelectionStart(mEditable); et.selectionEnd = Selection.getSelectionEnd(mEditable); et.flags = mSingleLine ? 
ExtractedText.FLAG_SINGLE_LINE : 0; return et; } /** * @see BaseInputConnection#beginBatchEdit() */ @Override public boolean beginBatchEdit() { if (DEBUG) Log.w(TAG, "beginBatchEdit [" + (mNumNestedBatchEdits == 0) + "]"); mNumNestedBatchEdits++; return true; } /** * @see BaseInputConnection#endBatchEdit() */ @Override public boolean endBatchEdit() { if (mNumNestedBatchEdits == 0) return false; --mNumNestedBatchEdits; if (DEBUG) Log.w(TAG, "endBatchEdit [" + (mNumNestedBatchEdits == 0) + "]"); if (mNumNestedBatchEdits == 0) updateSelectionIfRequired(); return mNumNestedBatchEdits != 0; } /** * @see BaseInputConnection#deleteSurroundingText(int, int) */ @Override public boolean deleteSurroundingText(int beforeLength, int afterLength) { if (DEBUG) { Log.w(TAG, "deleteSurroundingText [" + beforeLength + " " + afterLength + "]"); } int availableBefore = Selection.getSelectionStart(mEditable); int availableAfter = mEditable.length() - Selection.getSelectionEnd(mEditable); beforeLength = Math.min(beforeLength, availableBefore); afterLength = Math.min(afterLength, availableAfter); super.deleteSurroundingText(beforeLength, afterLength); updateSelectionIfRequired(); return mImeAdapter.deleteSurroundingText(beforeLength, afterLength); } /** * @see BaseInputConnection#sendKeyEvent(android.view.KeyEvent) */ @Override public boolean sendKeyEvent(KeyEvent event) { if (DEBUG) { Log.w(TAG, "sendKeyEvent [" + event.getAction() + "] [" + event.getKeyCode() + "]"); } // If this is a key-up, and backspace/del or if the key has a character representation, // need to update the underlying Editable (i.e. the local representation of the text // being edited). if (event.getAction() == KeyEvent.ACTION_UP) { if (event.getKeyCode() == KeyEvent.KEYCODE_DEL) { deleteSurroundingText(1, 0); return true; } else if (event.getKeyCode() == KeyEvent.KEYCODE_FORWARD_DEL) { deleteSurroundingText(0, 1); return true; } else { int unicodeChar = event.getUnicodeChar(); if (unicodeChar != 0) { int selectionStart = Selection.getSelectionStart(mEditable); int selectionEnd = Selection.getSelectionEnd(mEditable); if (selectionStart > selectionEnd) { int temp = selectionStart; selectionStart = selectionEnd; selectionEnd = temp; } mEditable.replace(selectionStart, selectionEnd, Character.toString((char) unicodeChar)); } } } else if (event.getAction() == KeyEvent.ACTION_DOWN) { // TODO(aurimas): remove this workaround when crbug.com/278584 is fixed. 
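// For the enter key the composition is committed inside a batch edit before the event is
// forwarded natively, so the key is not applied on top of a stale composition; DEL and
// FORWARD_DEL are swallowed on ACTION_DOWN because the ACTION_UP branch above already issues
// the corresponding deleteSurroundingText call.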
if (event.getKeyCode() == KeyEvent.KEYCODE_ENTER) { beginBatchEdit(); finishComposingText(); mImeAdapter.translateAndSendNativeEvents(event); endBatchEdit(); return true; } else if (event.getKeyCode() == KeyEvent.KEYCODE_DEL) { return true; } else if (event.getKeyCode() == KeyEvent.KEYCODE_FORWARD_DEL) { return true; } } mImeAdapter.translateAndSendNativeEvents(event); return true; } /** * @see BaseInputConnection#finishComposingText() */ @Override public boolean finishComposingText() { if (DEBUG) Log.w(TAG, "finishComposingText"); if (getComposingSpanStart(mEditable) == getComposingSpanEnd(mEditable)) { return true; } super.finishComposingText(); updateSelectionIfRequired(); mImeAdapter.finishComposingText(); return true; } /** * @see BaseInputConnection#setSelection(int, int) */ @Override public boolean setSelection(int start, int end) { if (DEBUG) Log.w(TAG, "setSelection [" + start + " " + end + "]"); int textLength = mEditable.length(); if (start < 0 || end < 0 || start > textLength || end > textLength) return true; super.setSelection(start, end); updateSelectionIfRequired(); return mImeAdapter.setEditableSelectionOffsets(start, end); } /** * Informs the InputMethodManager and InputMethodSession (i.e. the IME) that the text * state is no longer what the IME has and that it needs to be updated. */ void restartInput() { if (DEBUG) Log.w(TAG, "restartInput"); getInputMethodManagerWrapper().restartInput(mInternalView); mNumNestedBatchEdits = 0; } /** * @see BaseInputConnection#setComposingRegion(int, int) */ @Override public boolean setComposingRegion(int start, int end) { if (DEBUG) Log.w(TAG, "setComposingRegion [" + start + " " + end + "]"); int textLength = mEditable.length(); int a = Math.min(start, end); int b = Math.max(start, end); if (a < 0) a = 0; if (b < 0) b = 0; if (a > textLength) a = textLength; if (b > textLength) b = textLength; if (a == b) { removeComposingSpans(mEditable); } else { super.setComposingRegion(a, b); } updateSelectionIfRequired(); return mImeAdapter.setComposingRegion(a, b); } boolean isActive() { return getInputMethodManagerWrapper().isActive(mInternalView); } private InputMethodManagerWrapper getInputMethodManagerWrapper() { return mImeAdapter.getInputMethodManagerWrapper(); } /** * This method works around the issue crbug.com/373934 where Blink does not cancel * the composition when we send a commit with the empty text. * * TODO(aurimas) Remove this once crbug.com/373934 is fixed. * * @param text Text that software keyboard requested to commit. * @return Whether the workaround was performed. 
*/ private boolean maybePerformEmptyCompositionWorkaround(CharSequence text) { int selectionStart = Selection.getSelectionStart(mEditable); int selectionEnd = Selection.getSelectionEnd(mEditable); int compositionStart = getComposingSpanStart(mEditable); int compositionEnd = getComposingSpanEnd(mEditable); if (TextUtils.isEmpty(text) && (selectionStart == selectionEnd) && compositionStart != INVALID_COMPOSITION && compositionEnd != INVALID_COMPOSITION) { beginBatchEdit(); finishComposingText(); int selection = Selection.getSelectionStart(mEditable); deleteSurroundingText(selection - compositionStart, selection - compositionEnd); endBatchEdit(); return true; } return false; } @VisibleForTesting static class ImeState { public final String text; public final int selectionStart; public final int selectionEnd; public final int compositionStart; public final int compositionEnd; public ImeState(String text, int selectionStart, int selectionEnd, int compositionStart, int compositionEnd) { this.text = text; this.selectionStart = selectionStart; this.selectionEnd = selectionEnd; this.compositionStart = compositionStart; this.compositionEnd = compositionEnd; } } @VisibleForTesting ImeState getImeStateForTesting() { String text = mEditable.toString(); int selectionStart = Selection.getSelectionStart(mEditable); int selectionEnd = Selection.getSelectionEnd(mEditable); int compositionStart = getComposingSpanStart(mEditable); int compositionEnd = getComposingSpanEnd(mEditable); return new ImeState(text, selectionStart, selectionEnd, compositionStart, compositionEnd); } }
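/*
 * A minimal usage sketch (illustrative only; the connection, view and ImeAdapter instances are
 * assumed to come from the embedder's existing setup): feeding a renderer-side change into the
 * connection and reading back the resulting IME state.
 *
 *   AdapterInputConnection connection = ...; // created with the view, adapter and Editable
 *   connection.updateState("hello world", 5, 5, -1, -1, true); // non-IME (e.g. JS) change
 *   AdapterInputConnection.ImeState state = connection.getImeStateForTesting();
 *   // state.text equals "hello world", state.selectionStart == 5, state.selectionEnd == 5,
 *   // state.compositionStart == state.compositionEnd == -1 (no composition)
 */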
package com.thimbleware.jmemcached.storage.bytebuffer; import com.thimbleware.jmemcached.Key; import com.thimbleware.jmemcached.LocalCacheElement; import com.thimbleware.jmemcached.storage.CacheStorage; import org.jboss.netty.buffer.ChannelBuffer; import org.jboss.netty.buffer.ChannelBuffers; import java.io.IOException; import java.util.*; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.locks.ReentrantReadWriteLock; /** * Implementation of the concurrent (linked) sized map using the block buffer storage back end. * */ public final class BlockStorageCacheStorage implements CacheStorage<Key, LocalCacheElement> { Partition[] partitions; final AtomicInteger ceilingBytes; final AtomicInteger maximumItems; final AtomicInteger numberItems; final long maximumSizeBytes; final static class Buckets { List<Region> regions = new LinkedList<Region>(); } /** * Applies a supplemental hash function to a given hashCode, which * defends against poor quality hash functions. This is critical * because ConcurrentHashMap uses power-of-two length hash tables, * that otherwise encounter collisions for hashCodes that do not * differ in lower or upper bits. */ protected static int hash(int h) { // Spread bits to regularize both segment and index locations, // using variant of single-word Wang/Jenkins hash. h += (h << 15) ^ 0xffffcd7d; h ^= (h >>> 10); h += (h << 3); h ^= (h >>> 6); h += (h << 2) + (h << 14); return h ^ (h >>> 16); } final static class Partition { private static final int NUM_BUCKETS = 32768; ReentrantReadWriteLock storageLock = new ReentrantReadWriteLock(); Buckets[] buckets = new Buckets[NUM_BUCKETS]; ByteBufferBlockStore blockStore; int numberItems; Partition(ByteBufferBlockStore blockStore) { this.blockStore = blockStore; for (int i = 0; i < NUM_BUCKETS; i++) buckets[i] = new Buckets(); } public Region find(Key key) { int bucket = Math.abs(hash(key.hashCode()) % buckets.length); for (Region region : buckets[bucket].regions) { if (region.sameAs(key, blockStore)) return region; } return null; } public void remove(Key key, Region region) { int bucket = Math.abs(hash(key.hashCode()) % buckets.length); buckets[bucket].regions.remove(region); numberItems--; } public Region add(Key key, LocalCacheElement e) { ChannelBuffer buffer = ChannelBuffers.dynamicBuffer(); e.writeToBuffer(buffer); Region region = blockStore.alloc(buffer.capacity(), buffer); int bucket = Math.abs(hash(key.hashCode()) % buckets.length); buckets[bucket].regions.add(region); numberItems++; // check # buckets, trigger resize // if ((double)numberItems * 0.75 > buckets.length) // System.err.println("grow"); return region; } public void clear() { for (Buckets bucket : buckets) { bucket.regions.clear(); } blockStore.clear(); numberItems = 0; } public Collection<Key> keys() { Set<Key> keys = new HashSet<Key>(); for (Buckets bucket : buckets) { for (Region region : bucket.regions) { keys.add(region.keyFromRegion(blockStore)); } } return keys; } public int getNumberItems() { return numberItems; } } public BlockStorageCacheStorage(int blockStoreBuckets, int ceilingBytesParam, int blockSizeBytes, long maximumSizeBytes, int maximumItemsVal, BlockStoreFactory factory) { this.partitions = new Partition[blockStoreBuckets]; long bucketSizeBytes = maximumSizeBytes / blockStoreBuckets; for (int i = 0; i < blockStoreBuckets; i++) { this.partitions[i] = new Partition(factory.manufacture(bucketSizeBytes, blockSizeBytes)); } this.numberItems = new AtomicInteger(); this.ceilingBytes = new AtomicInteger(ceilingBytesParam);
this.maximumItems = new AtomicInteger(maximumItemsVal); this.maximumSizeBytes = maximumSizeBytes; } private Partition pickPartition(Key key) { return partitions[Math.abs(hash(key.hashCode())) % partitions.length]; } public final long getMemoryCapacity() { long capacity = 0; for (Partition byteBufferBlockStore : partitions) { capacity += byteBufferBlockStore.blockStore.getStoreSizeBytes(); } return capacity; } public final long getMemoryUsed() { long memUsed = 0; for (Partition byteBufferBlockStore : partitions) { memUsed += (byteBufferBlockStore.blockStore.getStoreSizeBytes() - byteBufferBlockStore.blockStore.getFreeBytes()); } return memUsed; } public final int capacity() { return maximumItems.get(); } public final void close() throws IOException { // first clear all items clear(); // then ask the block store to close for (Partition byteBufferBlockStore : partitions) { byteBufferBlockStore.blockStore.close(); } this.partitions = null; } public final LocalCacheElement putIfAbsent(Key key, LocalCacheElement item) { Partition partition = pickPartition(key); Region region; partition.storageLock.readLock().lock(); try { region = partition.find(key); // not there? add it if (region == null) { partition.storageLock.readLock().unlock(); partition.storageLock.writeLock().lock(); try { numberItems.incrementAndGet(); partition.add(key, item); } finally { partition.storageLock.readLock().lock(); partition.storageLock.writeLock().unlock(); } return null; } else { // there? return its value return region.toValue(partition.blockStore); } } finally { partition.storageLock.readLock().unlock(); } } /** * {@inheritDoc} */ public final boolean remove(Object okey, Object value) { if (!(okey instanceof Key) || (!(value instanceof LocalCacheElement))) return false; Key key = (Key) okey; Partition partition = pickPartition(key); Region region; try { partition.storageLock.readLock().lock(); region = partition.find(key); if (region == null) return false; else { partition.storageLock.readLock().unlock(); partition.storageLock.writeLock().lock(); try { partition.blockStore.free(region); partition.remove(key, region); numberItems.decrementAndGet(); return true; } finally { partition.storageLock.readLock().lock(); partition.storageLock.writeLock().unlock(); } } } finally { partition.storageLock.readLock().unlock(); } } public final boolean replace(Key key, LocalCacheElement original, LocalCacheElement replace) { Partition partition = pickPartition(key); Region region; partition.storageLock.readLock().lock(); try { region = partition.find(key); // not there? that's a fail if (region == null) return false; // there, check for equivalence of value LocalCacheElement el = null; el = region.toValue(partition.blockStore); if (!el.equals(original)) { return false; } else { partition.storageLock.readLock().unlock(); partition.storageLock.writeLock().lock(); try { partition.remove(key, region); partition.add(key, replace); return true; } finally { partition.storageLock.readLock().lock(); partition.storageLock.writeLock().unlock(); } } } finally { partition.storageLock.readLock().unlock(); } } public final LocalCacheElement replace(Key key, LocalCacheElement replace) { Partition partition = pickPartition(key); Region region; partition.storageLock.readLock().lock(); try { region = partition.find(key); // not there? 
that's a fail if (region == null) return null; // there, LocalCacheElement el = null; el = region.toValue(partition.blockStore); partition.storageLock.readLock().unlock(); partition.storageLock.writeLock().lock(); try { partition.remove(key, region); partition.add(key, replace); return el; } finally { partition.storageLock.readLock().lock(); partition.storageLock.writeLock().unlock(); } } finally { partition.storageLock.readLock().unlock(); } } public final int size() { return numberItems.get(); } public final boolean isEmpty() { return numberItems.get() == 0; } public final boolean containsKey(Object okey) { if (!(okey instanceof Key)) return false; Key key = (Key) okey; Partition partition = pickPartition(key); Region region; try { partition.storageLock.readLock().lock(); region = partition.find(key); return region != null; } finally { partition.storageLock.readLock().unlock(); } } public final boolean containsValue(Object o) { throw new UnsupportedOperationException("operation not supported"); } public final LocalCacheElement get(Object okey) { if (!(okey instanceof Key)) return null; Key key = (Key) okey; Partition partition = pickPartition(key); Region region; try { partition.storageLock.readLock().lock(); region = partition.find(key); if (region == null) return null; return region.toValue(partition.blockStore); } finally { partition.storageLock.readLock().unlock(); } } public final LocalCacheElement put(final Key key, final LocalCacheElement item) { Partition partition = pickPartition(key); Region region; partition.storageLock.readLock().lock(); try { region = partition.find(key); partition.storageLock.readLock().unlock(); partition.storageLock.writeLock().lock(); try { LocalCacheElement old = null; if (region != null) { old = region.toValue(partition.blockStore); } if (region != null) partition.remove(key, region); partition.add(key, item); numberItems.incrementAndGet(); return old; } finally { partition.storageLock.readLock().lock(); partition.storageLock.writeLock().unlock(); } } finally { partition.storageLock.readLock().unlock(); } } public final LocalCacheElement remove(Object okey) { if (!(okey instanceof Key)) return null; Key key = (Key) okey; Partition partition = pickPartition(key); Region region; try { partition.storageLock.readLock().lock(); region = partition.find(key); if (region == null) return null; else { partition.storageLock.readLock().unlock(); partition.storageLock.writeLock().lock(); try { LocalCacheElement old = null; old = region.toValue(partition.blockStore); partition.blockStore.free(region); partition.remove(key, region); numberItems.decrementAndGet(); return old; } finally { partition.storageLock.readLock().lock(); partition.storageLock.writeLock().unlock(); } } } finally { partition.storageLock.readLock().unlock(); } } public final void putAll(Map<? extends Key, ? extends LocalCacheElement> map) { // absent, lock the store and put the new value in for (Entry<? extends Key, ? 
extends LocalCacheElement> entry : map.entrySet()) { Key key = entry.getKey(); LocalCacheElement item = entry.getValue(); put(key, item); } } public final void clear() { for (Partition partition : partitions) { partition.storageLock.writeLock().lock(); numberItems.addAndGet(-partition.keys().size()); try { partition.clear(); } finally { partition.storageLock.writeLock().unlock(); } } } public Set<Key> keySet() { Set<Key> keys = new HashSet<Key>(); for (Partition partition : partitions) { keys.addAll(partition.keys()); } return keys; } public Collection<LocalCacheElement> values() { throw new UnsupportedOperationException("operation not supported"); } public Set<Entry<Key, LocalCacheElement>> entrySet() { throw new UnsupportedOperationException("operation not supported"); } }
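/*
 * Key placement sketch (illustrative; it mirrors pickPartition() and Partition.find() above).
 * The supplemental Wang/Jenkins-style hash() spreads entropy into the low bits before the
 * modulo, so hashCodes that differ only in their high bits still land in different partitions
 * and buckets:
 *
 *   int h = hash(key.hashCode());
 *   Partition p = partitions[Math.abs(h) % partitions.length];        // as in pickPartition()
 *   int bucket = Math.abs(hash(key.hashCode()) % p.buckets.length);   // as in Partition.find()
 *   List<Region> candidates = p.buckets[bucket].regions;              // scanned linearly
 */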
package org.hisp.dhis.webapi.controller; import static org.springframework.http.MediaType.APPLICATION_JSON_VALUE; import java.io.IOException; import java.util.Set; import java.util.UUID; import javax.servlet.http.HttpServletRequest; import org.apache.commons.lang3.StringUtils; import org.hisp.dhis.appmanager.AppManager; import org.hisp.dhis.common.BaseIdentifiableObject; import org.hisp.dhis.common.DhisApiVersion; import org.hisp.dhis.common.IdentifiableObjectManager; import org.hisp.dhis.configuration.Configuration; import org.hisp.dhis.configuration.ConfigurationService; import org.hisp.dhis.dataelement.DataElementGroup; import org.hisp.dhis.external.conf.DhisConfigurationProvider; import org.hisp.dhis.indicator.IndicatorGroup; import org.hisp.dhis.organisationunit.OrganisationUnit; import org.hisp.dhis.organisationunit.OrganisationUnitGroupSet; import org.hisp.dhis.organisationunit.OrganisationUnitLevel; import org.hisp.dhis.period.PeriodService; import org.hisp.dhis.period.PeriodType; import org.hisp.dhis.render.RenderService; import org.hisp.dhis.setting.SettingKey; import org.hisp.dhis.setting.SystemSettingManager; import org.hisp.dhis.user.UserGroup; import org.hisp.dhis.user.UserRole; import org.hisp.dhis.util.ObjectUtils; import org.hisp.dhis.webapi.controller.exception.NotFoundException; import org.hisp.dhis.webapi.mvc.annotation.ApiVersion; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.HttpStatus; import org.springframework.security.access.prepost.PreAuthorize; import org.springframework.stereotype.Controller; import org.springframework.ui.Model; import org.springframework.web.bind.annotation.DeleteMapping; import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.PostMapping; import org.springframework.web.bind.annotation.RequestBody; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.ResponseBody; import org.springframework.web.bind.annotation.ResponseStatus; /** * @author Lars Helge Overland */ @Controller @RequestMapping( "/configuration" ) @ApiVersion( { DhisApiVersion.DEFAULT, DhisApiVersion.ALL } ) public class ConfigurationController { @Autowired private ConfigurationService configurationService; @Autowired private DhisConfigurationProvider config; @Autowired private IdentifiableObjectManager identifiableObjectManager; @Autowired private PeriodService periodService; @Autowired private RenderService renderService; @Autowired private SystemSettingManager systemSettingManager; @Autowired private AppManager appManager; // Resources @GetMapping public @ResponseBody Configuration getConfiguration( Model model, HttpServletRequest request ) { return configurationService.getConfiguration(); } @ResponseStatus( value = HttpStatus.OK ) @GetMapping( "/systemId" ) public @ResponseBody String getSystemId( Model model, HttpServletRequest request ) { return configurationService.getConfiguration().getSystemId(); } @PreAuthorize( "hasRole('ALL')" ) @PostMapping( "/systemId" ) @ResponseStatus( value = HttpStatus.NO_CONTENT ) public void setSystemId( @RequestBody( required = false ) String systemId ) { systemId = ObjectUtils.firstNonNull( systemId, UUID.randomUUID().toString() ); Configuration configuration = configurationService.getConfiguration(); configuration.setSystemId( systemId ); configurationService.setConfiguration( configuration ); } @GetMapping( "/feedbackRecipients" ) public @ResponseBody UserGroup getFeedbackRecipients( 
Model model, HttpServletRequest request ) { return configurationService.getConfiguration().getFeedbackRecipients(); } @PreAuthorize( "hasRole('ALL') or hasRole('F_SYSTEM_SETTING')" ) @PostMapping( "/feedbackRecipients" ) @ResponseStatus( HttpStatus.NO_CONTENT ) public void setFeedbackRecipients( @RequestBody String uid ) throws NotFoundException { uid = trim( uid ); UserGroup group = identifiableObjectManager.get( UserGroup.class, uid ); if ( group == null ) { throw new NotFoundException( "User group", uid ); } Configuration configuration = configurationService.getConfiguration(); configuration.setFeedbackRecipients( group ); configurationService.setConfiguration( configuration ); } @PreAuthorize( "hasRole('ALL') or hasRole('F_SYSTEM_SETTING')" ) @DeleteMapping( "/feedbackRecipients" ) @ResponseStatus( HttpStatus.NO_CONTENT ) public void removeFeedbackRecipients() { Configuration configuration = configurationService.getConfiguration(); configuration.setFeedbackRecipients( null ); configurationService.setConfiguration( configuration ); } @GetMapping( "/systemUpdateNotificationRecipients" ) public @ResponseBody UserGroup getSystemUpdateNotificationRecipients( Model model, HttpServletRequest request ) { return configurationService.getConfiguration().getSystemUpdateNotificationRecipients(); } @PreAuthorize( "hasRole('ALL') or hasRole('F_SYSTEM_SETTING')" ) @PostMapping( "/systemUpdateNotificationRecipients" ) @ResponseStatus( HttpStatus.NO_CONTENT ) public void setSystemUpdateNotificationRecipients( @RequestBody String uid ) throws NotFoundException { uid = trim( uid ); UserGroup group = identifiableObjectManager.get( UserGroup.class, uid ); if ( group == null ) { throw new NotFoundException( "User group", uid ); } Configuration configuration = configurationService.getConfiguration(); configuration.setSystemUpdateNotificationRecipients( group ); configurationService.setConfiguration( configuration ); } @PreAuthorize( "hasRole('ALL') or hasRole('F_SYSTEM_SETTING')" ) @DeleteMapping( "/systemUpdateNotificationRecipients" ) @ResponseStatus( HttpStatus.NO_CONTENT ) public void removeSystemUpdateNotificationRecipients() { Configuration configuration = configurationService.getConfiguration(); configuration.setSystemUpdateNotificationRecipients( null ); configurationService.setConfiguration( configuration ); } @GetMapping( "/offlineOrganisationUnitLevel" ) public @ResponseBody OrganisationUnitLevel getOfflineOrganisationUnitLevel( Model model, HttpServletRequest request ) { return configurationService.getConfiguration().getOfflineOrganisationUnitLevel(); } @PreAuthorize( "hasRole('ALL') or hasRole('F_SYSTEM_SETTING')" ) @PostMapping( "/offlineOrganisationUnitLevel" ) @ResponseStatus( HttpStatus.NO_CONTENT ) public void setOfflineOrganisationUnitLevel( @RequestBody String uid ) throws NotFoundException { uid = trim( uid ); OrganisationUnitLevel organisationUnitLevel = identifiableObjectManager.get( OrganisationUnitLevel.class, uid ); if ( organisationUnitLevel == null ) { throw new NotFoundException( "Organisation unit level", uid ); } Configuration configuration = configurationService.getConfiguration(); configuration.setOfflineOrganisationUnitLevel( organisationUnitLevel ); configurationService.setConfiguration( configuration ); } @PreAuthorize( "hasRole('ALL') or hasRole('F_SYSTEM_SETTING')" ) @DeleteMapping( "/offlineOrganisationUnitLevel" ) @ResponseStatus( HttpStatus.NO_CONTENT ) public void removeOfflineOrganisationUnitLevel() { Configuration configuration = 
configurationService.getConfiguration(); configuration.setOfflineOrganisationUnitLevel( null ); configurationService.setConfiguration( configuration ); } @GetMapping( "/infrastructuralIndicators" ) public @ResponseBody IndicatorGroup getInfrastructuralIndicators( Model model, HttpServletRequest request ) { return configurationService.getConfiguration().getInfrastructuralIndicators(); } @PreAuthorize( "hasRole('ALL') or hasRole('F_SYSTEM_SETTING')" ) @PostMapping( "/infrastructuralIndicators" ) @ResponseStatus( HttpStatus.NO_CONTENT ) public void setInfrastructuralIndicators( @RequestBody String uid ) throws NotFoundException { uid = trim( uid ); IndicatorGroup group = identifiableObjectManager.get( IndicatorGroup.class, uid ); if ( group == null ) { throw new NotFoundException( "Indicator group", uid ); } Configuration configuration = configurationService.getConfiguration(); configuration.setInfrastructuralIndicators( group ); configurationService.setConfiguration( configuration ); } @GetMapping( "/infrastructuralDataElements" ) public @ResponseBody DataElementGroup getInfrastructuralDataElements( Model model, HttpServletRequest request ) { return configurationService.getConfiguration().getInfrastructuralDataElements(); } @PreAuthorize( "hasRole('ALL') or hasRole('F_SYSTEM_SETTING')" ) @PostMapping( "/infrastructuralDataElements" ) @ResponseStatus( HttpStatus.NO_CONTENT ) public void setInfrastructuralDataElements( @RequestBody String uid ) throws NotFoundException { uid = trim( uid ); DataElementGroup group = identifiableObjectManager.get( DataElementGroup.class, uid ); if ( group == null ) { throw new NotFoundException( "Data element group", uid ); } Configuration configuration = configurationService.getConfiguration(); configuration.setInfrastructuralDataElements( group ); configurationService.setConfiguration( configuration ); } @GetMapping( "/infrastructuralPeriodType" ) public @ResponseBody BaseIdentifiableObject getInfrastructuralPeriodType( Model model, HttpServletRequest request ) { String name = configurationService.getConfiguration().getInfrastructuralPeriodTypeDefaultIfNull().getName(); return new BaseIdentifiableObject( name, name, name ); } @PreAuthorize( "hasRole('ALL') or hasRole('F_SYSTEM_SETTING')" ) @PostMapping( "/infrastructuralPeriodType" ) @ResponseStatus( HttpStatus.NO_CONTENT ) public void setInfrastructuralPeriodType( @RequestBody String name ) throws NotFoundException { name = trim( name ); PeriodType periodType = PeriodType.getPeriodTypeByName( name ); if ( periodType == null ) { throw new NotFoundException( "Period type", name ); } Configuration configuration = configurationService.getConfiguration(); periodType = periodService.reloadPeriodType( periodType ); configuration.setInfrastructuralPeriodType( periodType ); configurationService.setConfiguration( configuration ); } @GetMapping( "/selfRegistrationRole" ) public @ResponseBody UserRole getSelfRegistrationRole( Model model, HttpServletRequest request ) { return configurationService.getConfiguration().getSelfRegistrationRole(); } @PreAuthorize( "hasRole('ALL') or hasRole('F_SYSTEM_SETTING')" ) @PostMapping( "/selfRegistrationRole" ) @ResponseStatus( HttpStatus.NO_CONTENT ) public void setSelfRegistrationRole( @RequestBody String uid ) throws NotFoundException { uid = trim( uid ); UserRole userGroup = identifiableObjectManager.get( UserRole.class, uid ); if ( userGroup == null ) { throw new NotFoundException( "User authority group", uid ); } Configuration configuration = 
configurationService.getConfiguration(); configuration.setSelfRegistrationRole( userGroup ); configurationService.setConfiguration( configuration ); } @PreAuthorize( "hasRole('ALL') or hasRole('F_SYSTEM_SETTING')" ) @DeleteMapping( "/selfRegistrationRole" ) @ResponseStatus( HttpStatus.NO_CONTENT ) public void removeSelfRegistrationRole() { Configuration configuration = configurationService.getConfiguration(); configuration.setSelfRegistrationRole( null ); configurationService.setConfiguration( configuration ); } @GetMapping( "/selfRegistrationOrgUnit" ) public @ResponseBody OrganisationUnit getSelfRegistrationOrgUnit( Model model, HttpServletRequest request ) { return configurationService.getConfiguration().getSelfRegistrationOrgUnit(); } @PreAuthorize( "hasRole('ALL') or hasRole('F_SYSTEM_SETTING')" ) @PostMapping( "/selfRegistrationOrgUnit" ) @ResponseStatus( HttpStatus.NO_CONTENT ) public void setSelfRegistrationOrgUnit( @RequestBody String uid ) throws NotFoundException { uid = trim( uid ); OrganisationUnit orgunit = identifiableObjectManager.get( OrganisationUnit.class, uid ); if ( orgunit == null ) { throw new NotFoundException( "Organisation unit", uid ); } Configuration configuration = configurationService.getConfiguration(); configuration.setSelfRegistrationOrgUnit( orgunit ); configurationService.setConfiguration( configuration ); } @PreAuthorize( "hasRole('ALL') or hasRole('F_SYSTEM_SETTING')" ) @DeleteMapping( "/selfRegistrationOrgUnit" ) @ResponseStatus( HttpStatus.NO_CONTENT ) public void removeSelfRegistrationOrgUnit() { Configuration configuration = configurationService.getConfiguration(); configuration.setSelfRegistrationOrgUnit( null ); configurationService.setConfiguration( configuration ); } @GetMapping( "/remoteServerUrl" ) public @ResponseBody String getRemoteServerUrl( Model model, HttpServletRequest request ) { return systemSettingManager.getStringSetting( SettingKey.REMOTE_INSTANCE_URL ); } @GetMapping( "/remoteServerUsername" ) public @ResponseBody String getRemoteServerUsername( Model model, HttpServletRequest request ) { return systemSettingManager.getStringSetting( SettingKey.REMOTE_INSTANCE_USERNAME ); } @GetMapping( "/facilityOrgUnitGroupSet" ) public @ResponseBody OrganisationUnitGroupSet getFacilityOrgUnitGroupSet() { return configurationService.getConfiguration().getFacilityOrgUnitGroupSet(); } @PreAuthorize( "hasRole('ALL') or hasRole('F_SYSTEM_SETTING')" ) @PostMapping( "/facilityOrgUnitGroupSet" ) @ResponseStatus( HttpStatus.NO_CONTENT ) public void setFacilityOrgUnitGroupSet( @RequestBody String uid ) throws NotFoundException { uid = trim( uid ); OrganisationUnitGroupSet groupSet = identifiableObjectManager.get( OrganisationUnitGroupSet.class, uid ); if ( groupSet == null ) { throw new NotFoundException( "Organisation unit group sets", uid ); } Configuration configuration = configurationService.getConfiguration(); configuration.setFacilityOrgUnitGroupSet( groupSet ); configurationService.setConfiguration( configuration ); } @GetMapping( "/facilityOrgUnitLevel" ) public @ResponseBody OrganisationUnitLevel getFacilityOrgUnitLevel() { return configurationService.getConfiguration().getFacilityOrgUnitLevel(); } @PreAuthorize( "hasRole('ALL') or hasRole('F_SYSTEM_SETTING')" ) @PostMapping( "/facilityOrgUnitLevel" ) @ResponseStatus( HttpStatus.NO_CONTENT ) public void setFacilityOrgUnitLevel( @RequestBody String uid ) throws NotFoundException { uid = trim( uid ); OrganisationUnitLevel level = identifiableObjectManager.get( OrganisationUnitLevel.class, uid ); 
if ( level == null ) { throw new NotFoundException( "Organisation unit level", uid ); } Configuration configuration = configurationService.getConfiguration(); configuration.setFacilityOrgUnitLevel( level ); configurationService.setConfiguration( configuration ); } @GetMapping( value = "/corsWhitelist", produces = APPLICATION_JSON_VALUE ) public @ResponseBody Set<String> getCorsWhitelist( Model model, HttpServletRequest request ) { return configurationService.getConfiguration().getCorsWhitelist(); } @SuppressWarnings( "unchecked" ) @PreAuthorize( "hasRole('ALL') or hasRole('F_SYSTEM_SETTING')" ) @PostMapping( value = "/corsWhitelist", consumes = APPLICATION_JSON_VALUE ) @ResponseStatus( HttpStatus.NO_CONTENT ) public void setCorsWhitelist( @RequestBody String input ) throws IOException { Set<String> corsWhitelist = renderService.fromJson( input, Set.class ); Configuration configuration = configurationService.getConfiguration(); configuration.setCorsWhitelist( corsWhitelist ); configurationService.setConfiguration( configuration ); } @GetMapping( "/systemReadOnlyMode" ) public @ResponseBody boolean getSystemReadOnlyMode( Model model, HttpServletRequest request ) { return config.isReadOnlyMode(); } @GetMapping( "/appHubUrl" ) public @ResponseBody String getAppHubUrl( Model model, HttpServletRequest request ) { return appManager.getAppHubUrl(); } /** * Trims the given string payload by removing double quotes. * * @param string the string. * @return a trimmed string. */ private String trim( String string ) { return StringUtils.remove( string, "\"" ); } }
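/*
 * Each setter endpoint above follows the same read-modify-write pattern against the single
 * Configuration object (a sketch of that pattern, using names from this controller):
 *
 *   Configuration configuration = configurationService.getConfiguration();
 *   configuration.setFeedbackRecipients( group );   // or any other single field
 *   configurationService.setConfiguration( configuration );
 *
 * Request bodies arrive as raw strings, so trim( uid ) strips the surrounding double quotes
 * that a JSON-encoded string payload carries before the object lookup is attempted.
 */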
package com.baidu.disconf.web.service.config.service.impl; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Properties; import org.apache.commons.io.FileUtils; import org.apache.commons.io.IOUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Propagation; import org.springframework.transaction.annotation.Transactional; import com.baidu.disconf.core.common.constants.Constants; import com.baidu.disconf.core.common.constants.DisConfigTypeEnum; import com.baidu.disconf.core.common.json.ValueVo; import com.baidu.disconf.web.innerapi.zookeeper.ZooKeeperDriver; import com.baidu.disconf.web.service.app.bo.App; import com.baidu.disconf.web.service.app.service.AppMgr; import com.baidu.disconf.web.service.config.bo.Config; import com.baidu.disconf.web.service.config.dao.ConfigDao; import com.baidu.disconf.web.service.config.form.ConfListForm; import com.baidu.disconf.web.service.config.form.ConfNewItemForm; import com.baidu.disconf.web.service.config.service.ConfigMgr; import com.baidu.disconf.web.service.config.utils.ConfigUtils; import com.baidu.disconf.web.service.config.vo.ConfListVo; import com.baidu.disconf.web.service.config.vo.MachineListVo; import com.baidu.disconf.web.service.env.bo.Env; import com.baidu.disconf.web.service.env.service.EnvMgr; import com.baidu.disconf.web.service.zookeeper.config.ZooConfig; import com.baidu.disconf.web.service.zookeeper.dto.ZkDisconfData; import com.baidu.disconf.web.service.zookeeper.dto.ZkDisconfData.ZkDisconfDataItem; import com.baidu.disconf.web.service.zookeeper.service.ZkDeployMgr; import com.baidu.dsp.common.constant.DataFormatConstants; import com.baidu.dsp.common.utils.DataTransfer; import com.baidu.dsp.common.utils.ServiceUtil; import com.baidu.ub.common.db.DaoPageResult; import com.github.knightliao.apollo.utils.data.GsonUtils; import com.github.knightliao.apollo.utils.io.OsUtil; import com.github.knightliao.apollo.utils.time.DateUtils; /** * * @author liaoqiqi * @version 2014-6-16 */ @Service public class ConfigMgrImpl implements ConfigMgr { protected static final Logger LOG = LoggerFactory.getLogger(ConfigMgrImpl.class); @Autowired private ConfigDao configDao; @Autowired private AppMgr appMgr; @Autowired private EnvMgr envMgr; @Autowired private ZooKeeperDriver zooKeeperDriver; @Autowired private ZooConfig zooConfig; @Autowired private ZkDeployMgr zkDeployMgr; public ValueVo getConfItemByParameter(Long appId, Long envId, String version, String key) { Config config = configDao.getByParameter(appId, envId, version, key, DisConfigTypeEnum.ITEM); if (config == null) { return ConfigUtils.getErrorVo("cannot find this config"); } ValueVo valueVo = new ValueVo(); valueVo.setValue(config.getValue()); valueVo.setStatus(Constants.OK); return valueVo; } @Override public Config getConfByParameter(Long appId, Long envId, String env, String key, DisConfigTypeEnum disConfigTypeEnum) { Config config = configDao.getByParameter(appId, envId, env, key, disConfigTypeEnum); return config; } /** * APPid */ @Override public List<String> getVersionListByAppEnv(Long appId, Long envId) { List<String> versionList = new ArrayList<String>(); List<Config> configs = configDao.getConfByAppEnv(appId, envId); for (Config config : configs) { if 
(!versionList.contains(config.getVersion())) { versionList.add(config.getVersion()); } } return versionList; } /** * * * @param confListForm * @return */ public List<File> getDisonfFileList(ConfListForm confListForm) { List<Config> configList = configDao.getConfigList(confListForm.getAppId(), confListForm.getEnvId(), confListForm.getVersion()); String curTime = DateUtils.format(new Date(), DataFormatConstants.COMMON_TIME_FORMAT); curTime = "tmp" + File.separator + curTime; OsUtil.makeDirs(curTime); List<File> files = new ArrayList<File>(); for (Config config : configList) { if (config.getType().equals(DisConfigTypeEnum.FILE.getType())) { File file = new File(curTime, config.getName()); try { FileUtils.writeByteArrayToFile(file, config.getValue().getBytes()); } catch (IOException e) { LOG.warn(e.toString()); } files.add(file); } } return files; } @Override public DaoPageResult<ConfListVo> getConfigList(ConfListForm confListForm, boolean fetchZk) { DaoPageResult<Config> configList = configDao.getConfigList(confListForm.getAppId(), confListForm.getEnvId(), confListForm.getVersion(), confListForm.getPage()); final App app = appMgr.getById(confListForm.getAppId()); final Env env = envMgr.getById(confListForm.getEnvId()); final boolean myFetchZk = fetchZk; Map<String, ZkDisconfData> zkDataMap = new HashMap<String, ZkDisconfData>(); if (myFetchZk) { zkDataMap = zkDeployMgr.getZkDisconfDataMap(app.getName(), env.getName(), confListForm.getVersion()); } final Map<String, ZkDisconfData> myzkDataMap = zkDataMap; DaoPageResult<ConfListVo> configListVo = ServiceUtil.getResult(configList, new DataTransfer<Config, ConfListVo>() { @Override public ConfListVo transfer(Config input) { String appNameString = app.getName(); String envName = env.getName(); ZkDisconfData zkDisconfData = null; if (myzkDataMap != null && myzkDataMap.keySet().contains(input.getName())) { zkDisconfData = myzkDataMap.get(input.getName()); } ConfListVo configListVo = convert(input, appNameString, envName, zkDisconfData); if (myFetchZk) { // value "" configListVo.setValue(""); configListVo.setMachineList(new ArrayList<ZkDisconfData.ZkDisconfDataItem>()); } return configListVo; } }); return configListVo; } private MachineListVo getZkData(List<ZkDisconfDataItem> datalist, Config config) { int errorNum = 0; for (ZkDisconfDataItem zkDisconfDataItem : datalist) { if (config.getType().equals(DisConfigTypeEnum.FILE.getType())) { List<String> errorKeyList = compareConifg(zkDisconfDataItem.getValue(), config.getValue()); if (errorKeyList.size() != 0) { zkDisconfDataItem.setErrorList(errorKeyList); errorNum++; } } else { if (zkDisconfDataItem.getValue().trim().equals(config.getValue().trim())) { } else { List<String> errorKeyList = new ArrayList<String>(); errorKeyList.add(config.getValue().trim()); zkDisconfDataItem.setErrorList(errorKeyList); errorNum++; } } } MachineListVo machineListVo = new MachineListVo(); machineListVo.setDatalist(datalist); machineListVo.setErrorNum(errorNum); machineListVo.setMachineSize(datalist.size()); return machineListVo; } /** * * * @param config * @return */ private ConfListVo convert(Config config, String appNameString, String envName, ZkDisconfData zkDisconfData) { ConfListVo confListVo = new ConfListVo(); confListVo.setConfigId(config.getId()); confListVo.setAppId(config.getAppId()); confListVo.setAppName(appNameString); confListVo.setEnvName(envName); confListVo.setEnvId(config.getEnvId()); confListVo.setCreateTime(config.getCreateTime()); confListVo.setModifyTime(config.getUpdateTime().substring(0, 12)); 
confListVo.setKey(config.getName()); confListVo.setValue(config.getValue()); confListVo.setVersion(config.getVersion()); confListVo.setType(DisConfigTypeEnum.getByType(config.getType()).getModelName()); confListVo.setTypeId(config.getType()); if (zkDisconfData != null) { confListVo.setMachineSize(zkDisconfData.getData().size()); List<ZkDisconfDataItem> datalist = zkDisconfData.getData(); MachineListVo machineListVo = getZkData(datalist, config); confListVo.setErrorNum(machineListVo.getErrorNum()); confListVo.setMachineList(machineListVo.getDatalist()); confListVo.setMachineSize(machineListVo.getMachineSize()); } return confListVo; } private List<String> compareConifg(String zkData, String dbData) { List<String> errorKeyList = new ArrayList<String>(); Properties prop = new Properties(); try { prop.load(IOUtils.toInputStream(dbData, "UTF-8")); } catch (IOException e) { LOG.error(e.toString()); errorKeyList.add(zkData); return errorKeyList; } Map<String, String> zkMap = GsonUtils.parse2Map(zkData); for (String keyInZk : zkMap.keySet()) { Object valueInDb = prop.get(keyInZk); String zkDataStr = zkMap.get(keyInZk); try { if ((zkDataStr == null && valueInDb != null) || (zkDataStr != null && valueInDb == null)) { errorKeyList.add(keyInZk); } else { if (!zkDataStr.equals(valueInDb.toString().trim())) { errorKeyList.add(keyInZk); } } } catch (Exception e) { LOG.warn(e.toString() + " ; " + keyInZk + " ; " + zkMap.get(keyInZk) + " ; " + valueInDb); } } return errorKeyList; } @Override public ConfListVo getConfVo(Long configId) { Config config = configDao.get(configId); App app = appMgr.getById(config.getAppId()); Env env = envMgr.getById(config.getEnvId()); return convert(config, app.getName(), env.getName(), null); } /** * IDZK */ @Override public MachineListVo getConfVoWithZk(Long configId) { Config config = configDao.get(configId); App app = appMgr.getById(config.getAppId()); Env env = envMgr.getById(config.getEnvId()); DisConfigTypeEnum disConfigTypeEnum = DisConfigTypeEnum.FILE; if (config.getType().equals(DisConfigTypeEnum.ITEM.getType())) { disConfigTypeEnum = DisConfigTypeEnum.ITEM; } ZkDisconfData zkDisconfData = zkDeployMgr.getZkDisconfData(app.getName(), env.getName(), config.getVersion(), disConfigTypeEnum, config.getName()); MachineListVo machineListVo = getZkData(zkDisconfData.getData(), config); return machineListVo; } @Override public Config getConfigById(Long configId) { return configDao.get(configId); } @Override @Transactional(propagation = Propagation.REQUIRED, rollbackFor = RuntimeException.class) public void updateItemValue(Long configId, String value) { configDao.updateValue(configId, value); } /** * Zookeeper, , */ @Override public void notifyZookeeper(Long configId) { ConfListVo confListVo = getConfVo(configId); if (confListVo.getTypeId().equals(DisConfigTypeEnum.FILE.getType())) { zooKeeperDriver.notifyNodeUpdate(confListVo.getAppName(), confListVo.getEnvName(), confListVo.getVersion(), confListVo.getKey(), GsonUtils.toJson(confListVo.getValue()), DisConfigTypeEnum.FILE); } else { zooKeeperDriver.notifyNodeUpdate(confListVo.getAppName(), confListVo.getEnvName(), confListVo.getVersion(), confListVo.getKey(), confListVo.getValue(), DisConfigTypeEnum.ITEM); } } @Override public String getValue(Long configId) { return configDao.getValue(configId); } @Override public void newConfig(ConfNewItemForm confNewForm, DisConfigTypeEnum disConfigTypeEnum) { Config config = new Config(); config.setAppId(confNewForm.getAppId()); config.setEnvId(confNewForm.getEnvId()); 
config.setName(confNewForm.getKey()); config.setType(disConfigTypeEnum.getType()); config.setVersion(confNewForm.getVersion()); config.setValue(confNewForm.getValue()); String curTime = DateUtils.format(new Date(), DataFormatConstants.COMMON_TIME_FORMAT); config.setCreateTime(curTime); config.setUpdateTime(curTime); configDao.create(config); } @Override public void delete(Long configId) { configDao.delete(configId); } }
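/*
 * Consistency-check sketch (illustrative data only). For FILE configs the database value is a
 * properties document while each ZooKeeper client reports a JSON map, so compareConifg() above
 * loads the DB side with java.util.Properties, parses the ZK side with GsonUtils.parse2Map,
 * and collects every key whose values differ:
 *
 *   String dbData = "timeout=30\nretries=3";
 *   String zkData = "{\"timeout\":\"30\",\"retries\":\"5\"}";
 *   // compareConifg(zkData, dbData) -> ["retries"]
 *
 * For ITEM configs getZkData() falls back to a plain trimmed string comparison.
 */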
package org.jboss.as.ejb3.deployment.processors; import static java.security.AccessController.doPrivileged; import java.net.URI; import java.security.PrivilegedAction; import org.jboss.as.ee.component.Attachments; import org.jboss.as.ee.component.ComponentDescription; import org.jboss.as.ee.component.EEModuleDescription; import org.jboss.as.ejb3.deployment.EjbDeploymentAttachmentKeys; import org.jboss.as.ejb3.logging.EjbLogger; import org.jboss.as.ejb3.remote.EJBClientContextService; import org.jboss.as.ejb3.remote.RemotingProfileService; import org.jboss.as.remoting.AbstractOutboundConnectionService; import org.jboss.as.server.deployment.DeploymentPhaseContext; import org.jboss.as.server.deployment.DeploymentUnit; import org.jboss.as.server.deployment.DeploymentUnitProcessingException; import org.jboss.as.server.deployment.DeploymentUnitProcessor; import org.jboss.ejb.client.EJBClientContext; import org.jboss.modules.Module; import org.jboss.modules.ModuleClassLoader; import org.jboss.msc.service.Service; import org.jboss.msc.service.ServiceBuilder; import org.jboss.msc.service.ServiceName; import org.jboss.msc.service.StartContext; import org.jboss.msc.service.StartException; import org.jboss.msc.service.StopContext; import org.jboss.msc.value.InjectedValue; import org.jboss.remoting3.RemotingOptions; import org.wildfly.common.context.ContextManager; import org.wildfly.discovery.Discovery; import org.wildfly.security.auth.client.AuthenticationConfiguration; import org.wildfly.security.auth.client.AuthenticationContext; import org.wildfly.security.auth.client.AuthenticationContextConfigurationClient; import org.wildfly.security.auth.client.MatchRule; import org.xnio.OptionMap; /** * A deployment processor which associates the {@link EJBClientContext}, belonging to a deployment unit, * with the deployment unit's classloader. 
* * @author Stuart Douglas * @author Jaikiran Pai */ public class EjbClientContextSetupProcessor implements DeploymentUnitProcessor { private static final AuthenticationContextConfigurationClient CLIENT = doPrivileged(AuthenticationContextConfigurationClient.ACTION); @Override public void deploy(final DeploymentPhaseContext phaseContext) throws DeploymentUnitProcessingException { final DeploymentUnit deploymentUnit = phaseContext.getDeploymentUnit(); final Module module = deploymentUnit.getAttachment(org.jboss.as.server.deployment.Attachments.MODULE); if (module == null) { return; } RegistrationService registrationService = new RegistrationService(module); ServiceName registrationServiceName = deploymentUnit.getServiceName().append("ejb3","client-context","registration-service"); final ServiceName profileServiceName = deploymentUnit.getAttachment(EjbDeploymentAttachmentKeys.EJB_REMOTING_PROFILE_SERVICE_NAME); final ServiceBuilder<Void> builder = phaseContext.getServiceTarget().addService(registrationServiceName, registrationService) .addDependency(getEJBClientContextServiceName(phaseContext), EJBClientContextService.class, registrationService.ejbClientContextInjectedValue) .addDependency(getDiscoveryServiceName(phaseContext), Discovery.class, registrationService.discoveryInjector); if (profileServiceName != null) { builder.addDependency(profileServiceName, RemotingProfileService.class, registrationService.profileServiceInjectedValue); } builder.install(); final EEModuleDescription moduleDescription = deploymentUnit.getAttachment(Attachments.EE_MODULE_DESCRIPTION); if (moduleDescription == null) { return; } //we need to make sure all our components have a dependency on the EJB client context registration, which in turn implies a dependency on the context for(final ComponentDescription component : moduleDescription.getComponentDescriptions()) { component.addDependency(registrationServiceName, ServiceBuilder.DependencyType.REQUIRED); } } @Override public void undeploy(final DeploymentUnit deploymentUnit) { } private ServiceName getEJBClientContextServiceName(final DeploymentPhaseContext phaseContext) { final DeploymentUnit deploymentUnit = phaseContext.getDeploymentUnit(); final DeploymentUnit parentDeploymentUnit = deploymentUnit.getParent(); // The top level parent deployment unit will have the attachment containing the EJB client context // service name ServiceName serviceName; if (parentDeploymentUnit != null) { serviceName = parentDeploymentUnit.getAttachment(EjbDeploymentAttachmentKeys.EJB_CLIENT_CONTEXT_SERVICE_NAME); } else { serviceName = deploymentUnit.getAttachment(EjbDeploymentAttachmentKeys.EJB_CLIENT_CONTEXT_SERVICE_NAME); } if (serviceName != null) { return serviceName; } return EJBClientContextService.DEFAULT_SERVICE_NAME; } private ServiceName getDiscoveryServiceName(final DeploymentPhaseContext phaseContext) { final DeploymentUnit deploymentUnit = phaseContext.getDeploymentUnit(); final DeploymentUnit parentDeploymentUnit = deploymentUnit.getParent(); if (parentDeploymentUnit != null) { return DiscoveryService.BASE_NAME.append(parentDeploymentUnit.getName()); } else { return DiscoveryService.BASE_NAME.append(deploymentUnit.getName()); } } private static final class RegistrationService implements Service<Void> { private final Module module; final InjectedValue<EJBClientContextService> ejbClientContextInjectedValue = new InjectedValue<>(); final InjectedValue<Discovery> discoveryInjector = new InjectedValue<>(); final InjectedValue<RemotingProfileService> 
profileServiceInjectedValue = new InjectedValue<>(); private RegistrationService(Module module) { this.module = module; } @Override public void start(StartContext context) throws StartException { doPrivileged((PrivilegedAction<Void>) () -> { // associate the EJB client context and discovery setup with the deployment classloader final EJBClientContextService ejbClientContextService = ejbClientContextInjectedValue.getValue(); final EJBClientContext ejbClientContext = ejbClientContextService.getClientContext(); final AuthenticationContext ejbClientClustersAuthenticationContext = ejbClientContextService.getClustersAuthenticationContext(); final ModuleClassLoader classLoader = module.getClassLoader(); EjbLogger.DEPLOYMENT_LOGGER.debugf("Registering EJB client context %s for classloader %s", ejbClientContext, classLoader); final ContextManager<AuthenticationContext> authenticationContextManager = AuthenticationContext.getContextManager(); final RemotingProfileService profileService = profileServiceInjectedValue.getOptionalValue(); if (profileService != null || ejbClientClustersAuthenticationContext != null) { // this is cheating but it works for our purposes AuthenticationContext authenticationContext = authenticationContextManager.getClassLoaderDefault(classLoader); if (authenticationContext == null) { authenticationContext = authenticationContextManager.get(); } // now transform it if (profileService != null) { for (RemotingProfileService.ConnectionSpec connectionSpec : profileService.getConnectionSpecs()) { authenticationContext = transformOne(connectionSpec, authenticationContext); } } if (ejbClientClustersAuthenticationContext != null) { authenticationContext = authenticationContext.with(0, ejbClientClustersAuthenticationContext); } // and set the result authenticationContextManager.setClassLoaderDefault(classLoader, authenticationContext); } EJBClientContext.getContextManager().setClassLoaderDefault(classLoader, ejbClientContext); Discovery.getContextManager().setClassLoaderDefault(classLoader, discoveryInjector.getValue()); return null; }); } @Override public void stop(StopContext context) { // de-associate the EJB client context with the deployment classloader doPrivileged((PrivilegedAction<Void>) () -> { final ModuleClassLoader classLoader = module.getClassLoader(); EjbLogger.DEPLOYMENT_LOGGER.debugf("unRegistering EJB client context for classloader %s", classLoader); EJBClientContext.getContextManager().setClassLoaderDefault(classLoader, null); Discovery.getContextManager().setClassLoaderDefault(classLoader, null); // this is redundant but should be safe AuthenticationContext.getContextManager().setClassLoaderDefault(classLoader, null); return null; }); } @Override public Void getValue() throws IllegalStateException, IllegalArgumentException { return null; } private static AuthenticationContext transformOne(RemotingProfileService.ConnectionSpec connectionSpec, AuthenticationContext context) { final AbstractOutboundConnectionService connectionService = connectionSpec.getInjector().getValue(); AuthenticationConfiguration authenticationConfiguration = connectionService.getAuthenticationConfiguration(); final URI destinationUri = connectionService.getDestinationUri(); MatchRule rule = MatchRule.ALL; final String scheme = destinationUri.getScheme(); if (scheme != null) { rule = rule.matchProtocol(scheme); } final String host = destinationUri.getHost(); if (host != null) { rule = rule.matchHost(host); } final int port = destinationUri.getPort(); if (port != -1) { rule = rule.matchPort(port); 
} final String path = destinationUri.getPath(); if (path != null && ! path.isEmpty()) { rule = rule.matchPath(path); } final OptionMap connectOptions = connectionSpec.getConnectOptions(); authenticationConfiguration = RemotingOptions.mergeOptionsIntoAuthenticationConfiguration(connectOptions, authenticationConfiguration); AuthenticationConfiguration configuration = CLIENT.getAuthenticationConfiguration(destinationUri, context, - 1, "ejb", "jboss", null); return context.with(0, rule, configuration.with(authenticationConfiguration)); } } }
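/*
 * A standalone sketch of the URI-to-MatchRule mapping performed in transformOne(...) above:
 * each component that the outbound connection URI defines narrows the rule that will select
 * the merged AuthenticationConfiguration. It only reuses the MatchRule calls already present
 * above (matchProtocol/matchHost/matchPort/matchPath) plus java.net.URI; the sketch's class
 * name and the example destination are illustrative, not part of WildFly.
 */
import java.net.URI;

import org.wildfly.security.auth.client.MatchRule;

class MatchRuleSketch {

    // Build a MatchRule covering exactly the URI components that are present.
    static MatchRule ruleFor(final URI destinationUri) {
        MatchRule rule = MatchRule.ALL;
        final String scheme = destinationUri.getScheme();
        if (scheme != null) {
            rule = rule.matchProtocol(scheme);      // e.g. "remote+http"
        }
        final String host = destinationUri.getHost();
        if (host != null) {
            rule = rule.matchHost(host);
        }
        final int port = destinationUri.getPort();
        if (port != -1) {
            rule = rule.matchPort(port);
        }
        final String path = destinationUri.getPath();
        if (path != null && !path.isEmpty()) {
            rule = rule.matchPath(path);
        }
        return rule;
    }

    public static void main(String[] args) {
        // A hypothetical outbound connection target; only scheme, host and port will match.
        System.out.println(ruleFor(URI.create("remote+http://target-host:8080")));
    }
}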
package it.xsemantics.example.fj.tests; import it.xsemantics.example.fj.fj.Class; import it.xsemantics.example.fj.fj.Field; import it.xsemantics.example.fj.fj.FjFactory; import it.xsemantics.example.fj.fj.Method; import it.xsemantics.example.fj.fj.MethodBody; import it.xsemantics.example.fj.fj.Parameter; import it.xsemantics.example.fj.fj.Program; import it.xsemantics.example.fj.fj.Selection; import it.xsemantics.example.fj.fj.This; import it.xsemantics.example.fj.fj.Type; import it.xsemantics.example.fj.util.ClassFactory; import it.xsemantics.example.fj.util.FjTypeUtils; import java.io.ByteArrayOutputStream; import java.io.IOException; import junit.textui.TestRunner; import org.eclipse.emf.ecore.resource.Resource; /** * <!-- begin-user-doc --> A test case for the model object ' * <em><b>Program</b></em>'. <!-- end-user-doc --> * * @generated NOT */ public class FormattingTest extends TestWithLoader { /** * The fixture for this Program test case. <!-- begin-user-doc --> <!-- * end-user-doc --> * * @generated */ protected Program fixture = null; FjTypeUtils fjTypeUtils; /** * <!-- begin-user-doc --> <!-- end-user-doc --> * * @generated */ public static void main(String[] args) { TestRunner.run(FormattingTest.class); } /** * Constructs a new Program test case with the given name. <!-- * begin-user-doc --> <!-- end-user-doc --> * * @generated */ public FormattingTest(String name) { super(name); } /** * Sets the fixture for this Program test case. <!-- begin-user-doc --> <!-- * end-user-doc --> * * @generated */ protected void setFixture(Program fixture) { this.fixture = fixture; } /** * Returns the fixture for this Program test case. <!-- begin-user-doc --> * <!-- end-user-doc --> * * @generated */ protected Program getFixture() { return fixture; } /** * <!-- begin-user-doc --> <!-- end-user-doc --> * * @see org.junit.TestCase#setUp() * @generated */ @Override protected void setUp() throws Exception { super.setUp(); setFixture(FjFactory.eINSTANCE.createProgram()); fjTypeUtils = getInjector().getInstance(FjTypeUtils.class); } /** * <!-- begin-user-doc --> <!-- end-user-doc --> * * @see org.junit.TestCase#tearDown() * @generated */ @Override protected void tearDown() throws Exception { setFixture(null); } /** * Test the formatting of a class model directly * * @throws IOException */ public void testResourceWithOneClass() throws IOException { Resource resource = createResource(); resource.getContents().add(fixture); Class cl = ClassFactory.createClass("A"); fixture.getClasses().add(cl); ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); resource.save(outputStream, null); assertEquals("class A {\n}", outputStream.toString().replace("\r", "")); } /** * Test the formatting of a more complex program model directly * * @throws IOException */ public void testFieldsAndMethods() throws IOException { Resource resource = createResource(); resource.getContents().add(fixture); Class cl = ClassFactory.createClass("A"); fixture.getClasses().add(cl); Type clType = fjTypeUtils.createClassType(cl); Field field = FjFactory.eINSTANCE.createField(); field.setName("f1"); field.setType(clType); cl.getMembers().add(field); field = FjFactory.eINSTANCE.createField(); field.setName("f2"); clType = fjTypeUtils.createClassType(cl); field.setType(clType); cl.getMembers().add(field); clType = fjTypeUtils.createClassType(cl); Method method = FjFactory.eINSTANCE.createMethod(); method.setName("myMeth"); method.setType(clType); clType = fjTypeUtils.createClassType(cl); Parameter parameter = 
FjFactory.eINSTANCE.createParameter(); parameter.setName("p1"); parameter.setType(clType); method.getParams().add(parameter); clType = fjTypeUtils.createClassType(cl); parameter = FjFactory.eINSTANCE.createParameter(); parameter.setName("p2"); parameter.setType(clType); method.getParams().add(parameter); This t = FjFactory.eINSTANCE.createThis(); t.setVariable("this"); Selection selection = FjFactory.eINSTANCE.createSelection(); selection.setReceiver(t); selection.setMessage(field); MethodBody methodBody = FjFactory.eINSTANCE.createMethodBody(); methodBody.setExpression(selection); method.setBody(methodBody); cl.getMembers().add(method); cl = ClassFactory.createClass("B"); fixture.getClasses().add(cl); ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); resource.save(outputStream, null); assertEquals( "class A {\n\tA f1;\n\tA f2;\n\tA myMeth(A p1, A p2) { return this.f2; }\n}\n\nclass B {\n}", outputStream.toString().replace("\r", "")); } } // FormattingTest
package io.ghostwriter.openjdk.v7; import io.ghostwriter.openjdk.v7.common.Instrumenter; import io.ghostwriter.openjdk.v7.common.Logger; import javax.annotation.processing.*; import javax.lang.model.SourceVersion; import javax.lang.model.element.Element; import javax.lang.model.element.ElementKind; import javax.lang.model.element.TypeElement; import java.util.Set; @SupportedAnnotationTypes("*") @SupportedSourceVersion(SourceVersion.RELEASE_7) @SupportedOptions({Instrumenter.Option.GHOSTWRITER_ANNOTATED_ONLY, Instrumenter.Option.GHOSTWRITER_EXCLUDE, Instrumenter.Option.GHOSTWRITER_TRACE_ON_ERROR, Instrumenter.Option.GHOSTWRITER_TRACE_RETURNING, Instrumenter.Option.GHOSTWRITER_TRACE_VALUE_CHANGE, Instrumenter.Option.GHOSTWRITER_EXCLUDE_METHODS, Instrumenter.Option.GHOSTWRITER_INSTRUMENT, Instrumenter.Option.GHOSTWRITER_VERBOSE, Instrumenter.Option.GHOSTWRITER_SHORT_METHOD_LIMIT}) public class GhostWriterAnnotationProcessor extends AbstractProcessor { // part of the Annotation processor API. Since GhostWriter just hijacks the processor pipeline // we take care not to claim any annotations and thus always return false. protected static boolean NO_ANNOTATIONS_CLAIMED = false; private Instrumenter instrumenter; public GhostWriterAnnotationProcessor(Instrumenter instrumenter) { this.instrumenter = instrumenter; } public GhostWriterAnnotationProcessor() { this(new Javac7Instrumenter()); } @Override public synchronized void init(ProcessingEnvironment processingEnv) { super.init(processingEnv); instrumenter.initialize(processingEnv); } @Override public boolean process(Set<? extends TypeElement> annotations, RoundEnvironment roundEnv) { if (roundEnv.processingOver()) { return NO_ANNOTATIONS_CLAIMED; } if (!instrumenter.doInstrument()) { Logger.note(getClass(), "process", "skipping processing..."); return NO_ANNOTATIONS_CLAIMED; } Logger.note(getClass(), "process", "starting processing..."); Set<? extends Element> elements = roundEnv.getRootElements(); for (Element element : elements) { ElementKind kind = element.getKind(); if (kind != null && kind.isClass()) { instrumenter.process(element); } } return NO_ANNOTATIONS_CLAIMED; } }
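/*
 * A minimal sketch of the "claim nothing" pattern described in the comment above: a processor
 * that registers for every annotation type, visits the root elements of each round, and always
 * returns false so that it never claims annotations away from other processors. Only standard
 * javax.annotation.processing / javax.lang.model APIs are used; the class name and the NOTE
 * message are illustrative, not part of GhostWriter.
 */
import java.util.Set;

import javax.annotation.processing.AbstractProcessor;
import javax.annotation.processing.RoundEnvironment;
import javax.annotation.processing.SupportedAnnotationTypes;
import javax.annotation.processing.SupportedSourceVersion;
import javax.lang.model.SourceVersion;
import javax.lang.model.element.Element;
import javax.lang.model.element.TypeElement;
import javax.tools.Diagnostic;

@SupportedAnnotationTypes("*")                    // see everything ...
@SupportedSourceVersion(SourceVersion.RELEASE_7)
public class PassThroughProcessor extends AbstractProcessor {

    @Override
    public boolean process(Set<? extends TypeElement> annotations, RoundEnvironment roundEnv) {
        if (roundEnv.processingOver()) {
            return false;                         // ... but claim nothing
        }
        for (Element root : roundEnv.getRootElements()) {
            // A real instrumenter would rewrite the AST here; this sketch only reports.
            processingEnv.getMessager().printMessage(Diagnostic.Kind.NOTE,
                    "visited " + root.getSimpleName());
        }
        return false;                             // leave the annotations for other processors
    }
}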
package com.tinkerpop.gremlin.process.util; import com.tinkerpop.gremlin.process.Traversal; import com.tinkerpop.gremlin.structure.Element; import com.tinkerpop.gremlin.structure.Property; import com.tinkerpop.gremlin.structure.Vertex; import com.tinkerpop.gremlin.structure.util.StringFactory; import com.tinkerpop.gremlin.structure.util.referenced.ReferencedFactory; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.function.Supplier; import java.util.function.UnaryOperator; public class DefaultTraversalSideEffects implements Traversal.SideEffects { protected Map<String, Object> objectMap = new HashMap<>(); protected Map<String, Supplier> supplierMap = new HashMap<>(); protected Optional<UnaryOperator> sackSplitOperator = Optional.empty(); protected Optional<Supplier> sackInitialValue = Optional.empty(); public DefaultTraversalSideEffects() { } /** * {@inheritDoc} */ @Override public void registerSupplier(final String key, final Supplier supplier) { this.supplierMap.put(key, supplier); } /** * {@inheritDoc} */ @Override public <V> Optional<Supplier<V>> getRegisteredSupplier(final String key) { return Optional.ofNullable(this.supplierMap.get(key)); } /** * {@inheritDoc} */ public void registerSupplierIfAbsent(final String key, final Supplier supplier) { if (!this.supplierMap.containsKey(key)) this.supplierMap.put(key, supplier); } @Override public <S> void setSack(final Supplier<S> initialValue, final Optional<UnaryOperator<S>> splitOperator) { this.sackInitialValue = Optional.ofNullable(initialValue); this.sackSplitOperator = (Optional) splitOperator; } @Override public <S> Optional<Supplier<S>> getSackInitialValue() { return (Optional) this.sackInitialValue; } @Override public <S> Optional<UnaryOperator<S>> getSackSplitOperator() { return (Optional) this.sackSplitOperator; } /** * {@inheritDoc} */ @Override public boolean exists(final String key) { return this.objectMap.containsKey(key) || this.supplierMap.containsKey(key); } /** * {@inheritDoc} */ @Override public void set(final String key, final Object value) { SideEffectHelper.validateSideEffect(key, value); this.objectMap.put(key, value); } /** * {@inheritDoc} */ @Override public <V> V get(final String key) throws IllegalArgumentException { final V value = (V) this.objectMap.get(key); if (null != value) return value; else { if (this.supplierMap.containsKey(key)) { final V v = (V) this.supplierMap.get(key).get(); this.objectMap.put(key, v); return v; } else { throw Traversal.SideEffects.Exceptions.sideEffectDoesNotExist(key); } } } /** * {@inheritDoc} */ @Override public <V> V getOrCreate(final String key, final Supplier<V> orCreate) { if (this.objectMap.containsKey(key)) return (V) this.objectMap.get(key); else if (this.supplierMap.containsKey(key)) { final V value = (V) this.supplierMap.get(key).get(); this.objectMap.put(key, value); return value; } else { final V value = orCreate.get(); this.objectMap.put(key, value); return value; } } /** * {@inheritDoc} */ @Override public void remove(final String key) { this.objectMap.remove(key); this.supplierMap.remove(key); } /** * {@inheritDoc} */ @Override public Set<String> keys() { final Set<String> keys = new HashSet<>(); keys.addAll(this.objectMap.keySet()); keys.addAll(this.supplierMap.keySet()); return keys; } /** * {@inheritDoc} */ @Override public void setLocalVertex(final Vertex vertex) { final Property<Map<String, Object>> property = vertex.property(SIDE_EFFECTS); if (property.isPresent()) { 
this.objectMap = property.value(); } else { this.objectMap = new HashMap<>(); vertex.property(SIDE_EFFECTS, this.objectMap); } } @Override public String toString() { return StringFactory.traversalSideEffectsString(this); } }
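/*
 * A tiny standalone illustration of the value-map / supplier-map interplay used by
 * DefaultTraversalSideEffects above: get(...) prefers an already materialized value and
 * otherwise materializes one from a registered Supplier, caching it for later calls.
 * Plain JDK collections only; the class name and the "count" key are illustrative,
 * not TinkerPop API.
 */
import java.util.HashMap;
import java.util.Map;
import java.util.function.Supplier;

class LazySideEffectsSketch {

    private final Map<String, Object> values = new HashMap<>();
    private final Map<String, Supplier<?>> suppliers = new HashMap<>();

    void registerSupplier(final String key, final Supplier<?> supplier) {
        suppliers.put(key, supplier);
    }

    @SuppressWarnings("unchecked")
    <V> V get(final String key) {
        Object value = values.get(key);
        if (value == null && suppliers.containsKey(key)) {
            value = suppliers.get(key).get();     // materialize once ...
            values.put(key, value);               // ... and cache for subsequent gets
        }
        if (value == null) {
            throw new IllegalArgumentException("side effect does not exist: " + key);
        }
        return (V) value;
    }

    public static void main(String[] args) {
        final LazySideEffectsSketch sideEffects = new LazySideEffectsSketch();
        sideEffects.registerSupplier("count", () -> 0L);
        final Long count = sideEffects.get("count");  // 0, created on first access
        System.out.println(count);
    }
}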
package org.glyptodon.guacamole.servlet; import java.io.BufferedWriter; import java.io.IOException; import java.io.InputStreamReader; import java.io.OutputStreamWriter; import java.io.Reader; import java.io.Writer; import javax.servlet.ServletException; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpSession; import org.glyptodon.guacamole.GuacamoleClientException; import org.glyptodon.guacamole.GuacamoleConnectionClosedException; import org.glyptodon.guacamole.GuacamoleException; import org.glyptodon.guacamole.GuacamoleResourceNotFoundException; import org.glyptodon.guacamole.GuacamoleServerException; import org.glyptodon.guacamole.io.GuacamoleReader; import org.glyptodon.guacamole.io.GuacamoleWriter; import org.glyptodon.guacamole.net.GuacamoleTunnel; import org.glyptodon.guacamole.protocol.GuacamoleStatus; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * A HttpServlet implementing and abstracting the operations required by the * HTTP implementation of the JavaScript Guacamole client's tunnel. * * @author Michael Jumper */ public abstract class GuacamoleHTTPTunnelServlet extends HttpServlet { /** * Logger for this class. */ private Logger logger = LoggerFactory.getLogger(GuacamoleHTTPTunnelServlet.class); /** * The prefix of the query string which denotes a tunnel read operation. */ private static final String READ_PREFIX = "read:"; /** * The prefix of the query string which denotes a tunnel write operation. */ private static final String WRITE_PREFIX = "write:"; /** * The length of the read prefix, in characters. */ private static final int READ_PREFIX_LENGTH = READ_PREFIX.length(); /** * The length of the write prefix, in characters. */ private static final int WRITE_PREFIX_LENGTH = WRITE_PREFIX.length(); /** * The length of every tunnel UUID, in characters. */ private static final int UUID_LENGTH = 36; @Override protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException { handleTunnelRequest(request, response); } @Override protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException { handleTunnelRequest(request, response); } /** * Sends an error on the given HTTP response using the information within * the given GuacamoleStatus. * * @param response The HTTP response to use to send the error. * @param guac_status The status to send * @param message A human-readable message that can be presented to the * user. * @throws ServletException If an error prevents sending of the error * code. */ public static void sendError(HttpServletResponse response, GuacamoleStatus guac_status, String message) throws ServletException { try { // If response not committed, send error code and message if (!response.isCommitted()) { response.addHeader("Guacamole-Status-Code", Integer.toString(guac_status.getGuacamoleStatusCode())); response.addHeader("Guacamole-Error-Message", message); response.sendError(guac_status.getHttpStatusCode()); } } catch (IOException ioe) { // If unable to send error at all due to I/O problems, // rethrow as servlet exception throw new ServletException(ioe); } } /** * Dispatches every HTTP GET and POST request to the appropriate handler * function based on the query string. * * @param request The HttpServletRequest associated with the GET or POST * request received. * @param response The HttpServletResponse associated with the GET or POST * request received. 
* @throws ServletException If an error occurs while servicing the request. */ protected void handleTunnelRequest(HttpServletRequest request, HttpServletResponse response) throws ServletException { try { String query = request.getQueryString(); if (query == null) throw new GuacamoleClientException("No query string provided."); // If connect operation, call doConnect() and return tunnel UUID // in response. if (query.equals("connect")) { GuacamoleTunnel tunnel = doConnect(request); if (tunnel != null) { // Get session HttpSession httpSession = request.getSession(true); GuacamoleSession session = new GuacamoleSession(httpSession); // Attach tunnel to session session.attachTunnel(tunnel); try { // Ensure buggy browsers do not cache response response.setHeader("Cache-Control", "no-cache"); // Send UUID to client response.getWriter().print(tunnel.getUUID().toString()); } catch (IOException e) { throw new GuacamoleServerException(e); } } // Failed to connect else throw new GuacamoleResourceNotFoundException("No tunnel created."); } // If read operation, call doRead() with tunnel UUID, ignoring any // characters following the tunnel UUID. else if(query.startsWith(READ_PREFIX)) doRead(request, response, query.substring( READ_PREFIX_LENGTH, READ_PREFIX_LENGTH + UUID_LENGTH)); // If write operation, call doWrite() with tunnel UUID, ignoring any // characters following the tunnel UUID. else if(query.startsWith(WRITE_PREFIX)) doWrite(request, response, query.substring( WRITE_PREFIX_LENGTH, WRITE_PREFIX_LENGTH + UUID_LENGTH)); // Otherwise, invalid operation else throw new GuacamoleClientException("Invalid tunnel operation: " + query); } // Catch any thrown guacamole exception and attempt to pass within the // HTTP response, logging each error appropriately. catch (GuacamoleClientException e) { logger.warn("HTTP tunnel request rejected: {}", e.getMessage()); sendError(response, e.getStatus(), e.getMessage()); } catch (GuacamoleException e) { logger.error("HTTP tunnel request failed: {}", e.getMessage()); logger.debug("Internal error in HTTP tunnel.", e); sendError(response, e.getStatus(), "Internal server error."); } } /** * Called whenever the JavaScript Guacamole client makes a connection * request. It it up to the implementor of this function to define what * conditions must be met for a tunnel to be configured and returned as a * result of this connection request (whether some sort of credentials must * be specified, for example). * * @param request The HttpServletRequest associated with the connection * request received. Any parameters specified along with * the connection request can be read from this object. * @return A newly constructed GuacamoleTunnel if successful, * null otherwise. * @throws GuacamoleException If an error occurs while constructing the * GuacamoleTunnel, or if the conditions * required for connection are not met. */ protected abstract GuacamoleTunnel doConnect(HttpServletRequest request) throws GuacamoleException; /** * Called whenever the JavaScript Guacamole client makes a read request. * This function should in general not be overridden, as it already * contains a proper implementation of the read operation. * * @param request The HttpServletRequest associated with the read request * received. * @param response The HttpServletResponse associated with the write request * received. Any data to be sent to the client in response * to the write request should be written to the response * body of this HttpServletResponse. 
* @param tunnelUUID The UUID of the tunnel to read from, as specified in * the write request. This tunnel must be attached to * the Guacamole session. * @throws GuacamoleException If an error occurs while handling the read * request. */ protected void doRead(HttpServletRequest request, HttpServletResponse response, String tunnelUUID) throws GuacamoleException { HttpSession httpSession = request.getSession(false); GuacamoleSession session = new GuacamoleSession(httpSession); // Get tunnel, ensure tunnel exists GuacamoleTunnel tunnel = session.getTunnel(tunnelUUID); if (tunnel == null) throw new GuacamoleResourceNotFoundException("No such tunnel."); // Ensure tunnel is open if (!tunnel.isOpen()) throw new GuacamoleResourceNotFoundException("Tunnel is closed."); // Obtain exclusive read access GuacamoleReader reader = tunnel.acquireReader(); try { // Note that although we are sending text, Webkit browsers will // buffer 1024 bytes before starting a normal stream if we use // anything but application/octet-stream. response.setContentType("application/octet-stream"); response.setHeader("Cache-Control", "no-cache"); // Get writer for response Writer out = new BufferedWriter(new OutputStreamWriter( response.getOutputStream(), "UTF-8")); // Stream data to response, ensuring output stream is closed try { // Detach tunnel and throw error if EOF (and we haven't sent any // data yet. char[] message = reader.read(); if (message == null) throw new GuacamoleConnectionClosedException("Tunnel reached end of stream."); // For all messages, until another stream is ready (we send at least one message) do { // Get message output bytes out.write(message, 0, message.length); // Flush if we expect to wait if (!reader.available()) { out.flush(); response.flushBuffer(); } // No more messages another stream can take over if (tunnel.hasQueuedReaderThreads()) break; } while (tunnel.isOpen() && (message = reader.read()) != null); // Close tunnel immediately upon EOF if (message == null) { session.detachTunnel(tunnel); tunnel.close(); } // End-of-instructions marker out.write("0.;"); out.flush(); response.flushBuffer(); } // Send end-of-stream marker and close tunnel if connection is closed catch (GuacamoleConnectionClosedException e) { // Detach and close session.detachTunnel(tunnel); tunnel.close(); // End-of-instructions marker out.write("0.;"); out.flush(); response.flushBuffer(); } catch (GuacamoleException e) { // Detach and close session.detachTunnel(tunnel); tunnel.close(); throw e; } // Always close output stream finally { out.close(); } } catch (IOException e) { // Log typically frequent I/O error if desired logger.debug("Error writing to servlet output stream", e); // Detach and close session.detachTunnel(tunnel); tunnel.close(); } finally { tunnel.releaseReader(); } } /** * Called whenever the JavaScript Guacamole client makes a write request. * This function should in general not be overridden, as it already * contains a proper implementation of the write operation. * * @param request The HttpServletRequest associated with the write request * received. Any data to be written will be specified within * the body of this request. * @param response The HttpServletResponse associated with the write request * received. * @param tunnelUUID The UUID of the tunnel to write to, as specified in * the write request. This tunnel must be attached to * the Guacamole session. * @throws GuacamoleException If an error occurs while handling the write * request. 
*/ protected void doWrite(HttpServletRequest request, HttpServletResponse response, String tunnelUUID) throws GuacamoleException { HttpSession httpSession = request.getSession(false); GuacamoleSession session = new GuacamoleSession(httpSession); GuacamoleTunnel tunnel = session.getTunnel(tunnelUUID); if (tunnel == null) throw new GuacamoleResourceNotFoundException("No such tunnel."); // We still need to set the content type to avoid the default of // text/html, as such a content type would cause some browsers to // attempt to parse the result, even though the JavaScript client // does not explicitly request such parsing. response.setContentType("application/octet-stream"); response.setHeader("Cache-Control", "no-cache"); response.setContentLength(0); // Send data try { // Get writer from tunnel GuacamoleWriter writer = tunnel.acquireWriter(); // Get input reader for HTTP stream Reader input = new InputStreamReader( request.getInputStream(), "UTF-8"); // Transfer data from input stream to tunnel output, ensuring // input is always closed try { // Buffer int length; char[] buffer = new char[8192]; // Transfer data using buffer while (tunnel.isOpen() && (length = input.read(buffer, 0, buffer.length)) != -1) writer.write(buffer, 0, length); } // Close input stream in all cases finally { input.close(); } } catch (GuacamoleConnectionClosedException e) { logger.debug("Connection to guacd closed.", e); } catch (IOException e) { // Detach and close session.detachTunnel(tunnel); tunnel.close(); throw new GuacamoleServerException("I/O Error sending data to server: " + e.getMessage(), e); } finally { tunnel.releaseWriter(); } } } /** * \example ExampleTunnelServlet.java * * A basic example demonstrating extending GuacamoleTunnelServlet and * implementing doConnect() to configure the Guacamole connection as * desired. */
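/*
 * A sketch of a concrete subclass wiring up doConnect(), in the spirit of the
 * ExampleTunnelServlet referenced above. It assumes the older org.glyptodon 0.9.x
 * guacamole-common API used in this file, where GuacamoleTunnel is directly instantiable
 * (newer releases use SimpleGuacamoleTunnel instead); the guacd address, protocol and
 * connection parameters are placeholders, not defaults.
 */
import javax.servlet.http.HttpServletRequest;

import org.glyptodon.guacamole.GuacamoleException;
import org.glyptodon.guacamole.net.GuacamoleSocket;
import org.glyptodon.guacamole.net.GuacamoleTunnel;
import org.glyptodon.guacamole.net.InetGuacamoleSocket;
import org.glyptodon.guacamole.protocol.ConfiguredGuacamoleSocket;
import org.glyptodon.guacamole.protocol.GuacamoleConfiguration;
import org.glyptodon.guacamole.servlet.GuacamoleHTTPTunnelServlet;

public class SketchTunnelServlet extends GuacamoleHTTPTunnelServlet {

    @Override
    protected GuacamoleTunnel doConnect(HttpServletRequest request) throws GuacamoleException {

        // Describe the remote desktop connection that guacd should establish.
        GuacamoleConfiguration config = new GuacamoleConfiguration();
        config.setProtocol("vnc");
        config.setParameter("hostname", "desktop.example.com");
        config.setParameter("port", "5901");

        // Hand the configuration to guacd (assumed here to listen on localhost:4822) and
        // wrap the resulting socket in a tunnel; the servlet above then associates the
        // tunnel's UUID with subsequent read:/write: requests.
        GuacamoleSocket socket = new ConfiguredGuacamoleSocket(
                new InetGuacamoleSocket("localhost", 4822), config);
        return new GuacamoleTunnel(socket);
    }
}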
package org.jboss.as.console.client.administration.role; import static org.jboss.as.console.client.administration.role.model.PrincipalType.USER; import static org.jboss.dmr.client.ModelDescriptionConstants.*; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Stack; import com.allen_sauer.gwt.log.client.Log; import com.google.inject.Inject; import com.google.web.bindery.event.shared.EventBus; import com.gwtplatform.mvp.client.Presenter; import com.gwtplatform.mvp.client.View; import com.gwtplatform.mvp.client.annotations.NameToken; import com.gwtplatform.mvp.client.annotations.ProxyCodeSplit; import com.gwtplatform.mvp.client.proxy.Place; import com.gwtplatform.mvp.client.proxy.Proxy; import org.jboss.as.console.client.Console; import org.jboss.as.console.client.administration.role.model.Principal; import org.jboss.as.console.client.administration.role.model.PrincipalType; import org.jboss.as.console.client.administration.role.model.Principals; import org.jboss.as.console.client.administration.role.model.RoleAssignment; import org.jboss.as.console.client.administration.role.model.RoleAssignments; import org.jboss.as.console.client.administration.role.model.Roles; import org.jboss.as.console.client.administration.role.model.ScopedRole; import org.jboss.as.console.client.core.NameTokens; import org.jboss.as.console.client.core.message.Message; import org.jboss.as.console.client.domain.model.HostInformationStore; import org.jboss.as.console.client.domain.model.ServerGroupStore; import org.jboss.as.console.client.domain.model.SimpleCallback; import org.jboss.as.console.client.rbac.StandardRole; import org.jboss.as.console.client.shared.BeanFactory; import org.jboss.as.console.client.shared.subsys.RevealStrategy; import org.jboss.as.console.spi.AccessControl; import org.jboss.ballroom.client.widgets.window.DefaultWindow; import org.jboss.dmr.client.ModelNode; import org.jboss.dmr.client.dispatch.DispatchAsync; import org.jboss.dmr.client.dispatch.impl.DMRAction; import org.jboss.dmr.client.dispatch.impl.DMRResponse; import org.jboss.gwt.flow.client.Outcome; /** * There are some constraints when managing role assignments in the console: * <ol> * <li>There has to be at least one inclusion for the role assignment</li> * <li>An exclusion can only contain users excluded from a group</li> * </ol> * <p>Role assignments which do not meet these constraints won't be visible in the console and have to be * managed using other tools (e.g.
the CLI)</p> * * @author Harald Pehl */ public class RoleAssignmentPresenter extends Presenter<RoleAssignmentPresenter.MyView, RoleAssignmentPresenter.MyProxy> { private final RevealStrategy revealStrategy; private final DispatchAsync dispatcher; private final BeanFactory beanFactory; private final ReadModelOperation modelOperation; private DefaultWindow window; private Principals principals; private RoleAssignments assignments; private Roles roles; private List<String> hosts; private List<String> serverGroups; @Inject public RoleAssignmentPresenter(final EventBus eventBus, final MyView view, final MyProxy proxy, final RevealStrategy revealStrategy, final DispatchAsync dispatcher, final BeanFactory beanFactory, final HostInformationStore hostInformationStore, ServerGroupStore serverGroupStore) { super(eventBus, view, proxy); this.revealStrategy = revealStrategy; this.dispatcher = dispatcher; this.beanFactory = beanFactory; this.modelOperation = new ReadModelOperation(dispatcher, beanFactory, hostInformationStore, serverGroupStore); this.principals = new Principals(); this.assignments = new RoleAssignments(beanFactory); this.roles = new Roles(); this.hosts = new ArrayList<String>(); this.serverGroups = new ArrayList<String>(); } @Override protected void onBind() { super.onBind(); getView().setPresenter(this); } @Override protected void revealInParent() { revealStrategy.revealInAdministration(this); } @Override protected void onReset() { super.onReset(); if (!modelOperation.isPending()) { System.out.print("Loading role assignments..."); modelOperation.extecute(new Outcome<Map<ReadModelOperation.Results, Object>>() { @Override public void onFailure(final Map<ReadModelOperation.Results, Object> context) { System.out.println("FAILED"); Throwable caught = (Throwable) context.get(ReadModelOperation.Results.ERROR); if (caught != null) { Log.error("Unknown error", caught); Console.error("Unknown error", caught.getMessage()); } } @Override @SuppressWarnings("unchecked") public void onSuccess(final Map<ReadModelOperation.Results, Object> context) { System.out.println("DONE"); principals = (Principals) context.get(ReadModelOperation.Results.PRINCIPALS); assignments = (RoleAssignments) context.get(ReadModelOperation.Results.ASSIGNMENTS); roles = (Roles) context.get(ReadModelOperation.Results.ROLES); hosts = (List<String>) context.get(ReadModelOperation.Results.HOSTS); serverGroups = (List<String>) context.get(ReadModelOperation.Results.SERVER_GROUPS); getView().update(principals, assignments, roles, hosts, serverGroups); } }); } } public void launchAddRoleAssignmentWizard(final PrincipalType type) { closeWindow(); String title = type == USER ? 
Console.CONSTANTS.role_assignment_add_user() : Console .CONSTANTS.role_assignment_add_group(); window = new DefaultWindow(title); window.setWidth(480); window.setHeight(570); AddRoleAssignmentWizard wizard = new AddRoleAssignmentWizard(type, principals, roles, this, beanFactory); window.trapWidget(wizard.asWidget()); window.setGlassEnabled(true); window.center(); } public void addRoleAssignment(final RoleAssignment assignment) { Console.info("Not yet implemented"); } public void saveRoleAssignment(final RoleAssignment assignment, final Map<String, Object> changedValues) { Console.info("Not yet implemented"); } public void removeRoleAssignment(final RoleAssignment assignment) { Console.info("Not yet implemented"); } public void launchAddScopedRoleWizard() { closeWindow(); window = new DefaultWindow(Console.CONSTANTS.administration_add_scoped_role()); window.setWidth(480); window.setHeight(400); AddScopedRoleWizard wizard = new AddScopedRoleWizard(hosts, serverGroups, this); window.trapWidget(wizard.asWidget()); window.setGlassEnabled(true); window.center(); } public void addScopedRole(final ScopedRole role) { Console.info("Not yet implemented"); System.out.println( "Add scoped role " + role.getName() + " based on " + role.getBaseRole() + " scoped to " + role .getType() + " " + role.getScope()); } public void saveScopedRole(final ScopedRole role, final Map<String, Object> changedValues) { Console.info("Not yet implemented"); System.out.println( "Save scoped role " + role.getName() + " based on " + role.getBaseRole() + " scoped to " + role .getType() + " " + role.getScope()); System.out.println("Changed values: " + changedValues); } public void removeScopedRole(final ScopedRole role) { Console.info("Not yet implemented"); System.out.println( "Remove scoped role " + role.getName() + " based on " + role.getBaseRole() + " scoped to " + role .getType() + " " + role.getScope()); } public void closeWindow() { if (window != null) { window.hide(); } } public void onAdd(final StandardRole role, final RoleAssignment roleAssignment, final Principal principal) { closeWindow(); // System.out.println("About to add " + principal.getType() + " " + principal // .getName() + " to role " + role + " / " + (roleAssignment.isInclude() ? "includes" : "exludes")); AddRoleAssignmentOperation addPrincipalOperation = new AddRoleAssignmentOperation(dispatcher, role, roleAssignment, principal); addPrincipalOperation.extecute(new Outcome<Stack<Boolean>>() { @Override public void onFailure(final Stack<Boolean> context) { // TODO Error handling Console.MODULES.getMessageCenter().notify(new Message("Cannot add principal", Message.Severity.Error)); } @Override public void onSuccess(final Stack<Boolean> context) { // getView().reset(); } }); } public void onDelete(final StandardRole role, final RoleAssignment roleAssignment, final Principal principal) { // System.out.println("About to delete " + principal.getType() + " " + principal // .getName() + " from role " + role + " / " + (roleAssignment.isInclude() ? 
"includes" : "exludes")); final ModelNode operation = new ModelNode(); StringBuilder principalKey = new StringBuilder(); boolean realmGiven = principal.getRealm() != null && principal.getRealm().length() != 0; principalKey.append(principal.getType().name().toLowerCase()).append("-").append(principal.getName()); if (realmGiven) { principalKey.append("@").append(principal.getRealm()); } operation.get(ADDRESS).add("core-service", "management"); operation.get(ADDRESS).add("access", "authorization"); operation.get(ADDRESS).add("role-mapping", role.name()); // operation.get(ADDRESS).add(roleAssignment.isInclude() ? "include" : "exclude", principalKey.toString()); operation.get(OP).set(REMOVE); dispatcher.execute(new DMRAction(operation), new SimpleCallback<DMRResponse>() { @Override public void onSuccess(DMRResponse response) { // getView().reset(); } }); } @ProxyCodeSplit @NameToken(NameTokens.RoleAssignmentPresenter) @AccessControl(resources = {"/core-service=management/access=authorization"}) public interface MyProxy extends Proxy<RoleAssignmentPresenter>, Place { } public interface MyView extends View { void setPresenter(final RoleAssignmentPresenter presenter); void update(final Principals principals, final RoleAssignments assignments, final Roles roles, final List<String> hosts, final List<String> serverGroups); } }
/** * Class: SmsTransportMethod * @author vijayu * Desc: This class represents a transport method using SMS and the procedures for * transmitting a TransportMessage using the SMS protocol. The defining feature * of SMS is the character limit imposed for a given SMS message. To account for * this, we use a SMSSplitTransportMessage to automatically split the message into * parts that we know will each fit into SMS messages with easy to parse metadata * for each message part. */ //TODO: Handle Incoming SMS Messages - right now, our thread only transmits package org.javarosa.communication.sms; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.util.Enumeration; import javax.microedition.io.Connector; import javax.wireless.messaging.MessageConnection; import javax.wireless.messaging.MessageListener; import javax.wireless.messaging.TextMessage; import org.javarosa.core.api.IActivity; import org.javarosa.core.services.ITransportManager; import org.javarosa.core.services.transport.ITransportDestination; import org.javarosa.core.services.transport.TransportMessage; import org.javarosa.core.services.transport.TransportMethod; public class SmsTransportMethod implements TransportMethod { private TransportMessage message; private ITransportManager manager; private IActivity destinationRetrievalActivity; private WorkerThread primaryWorker; private static final String name = "SMS"; private static final String SMSPort = "16498"; //TODO: Clarify - this was taken from previous CRS App public void transmit(TransportMessage message, ITransportManager manager) { this.message = message; this.manager = manager; primaryWorker = new WorkerThread(); new Thread(primaryWorker).start(); } public void closeConnections() { if(primaryWorker != null) primaryWorker.cleanStreams(); } public ITransportDestination getDefaultDestination() { //FIXME: Figure out the SMS application of this... return new SmsTransportDestination("sms://+5566511"); } public IActivity getDestinationRetrievalActivity() { return destinationRetrievalActivity; } public int getId() { return TransportMethod.SMS; } public String getName() { return name; } public void setDestinationRetrievalActivity(IActivity activity) { destinationRetrievalActivity = activity; } private class WorkerThread implements Runnable, MessageListener{ private MessageConnection mconn; public void cleanStreams(){ if (mconn != null) { try { mconn.close(); } catch (IOException e) { System.err.println("IO Exception while closing SMS Message Connection"); e.printStackTrace(); } } } public void run() { // Open an SMS Message connection to send the messages String destinationUrl = ((SmsTransportDestination)message.getDestination()).getSmsAddress(); try { SMSSplitTransportMessage sp = new SMSSplitTransportMessage(message); sp.splitMessage(); // Set destination URL from TransportMessage data mconn = (MessageConnection)Connector.open(destinationUrl); Enumeration e = sp.getMessageParts().elements(); TextMessage tmsg; while(e.hasMoreElements()) { tmsg = (TextMessage)mconn.newMessage(MessageConnection.TEXT_MESSAGE); tmsg.setAddress(destinationUrl); String payload = new String(((ByteArrayOutputStream)e.nextElement()).toByteArray()); System.out.println("SMS Payload: " + payload); tmsg.setPayloadText(payload); mconn.send(tmsg); } //FIXME: Risk of memory leak with tmsg here?? 
message.setStatus(TransportMessage.STATUS_DELIVERED); System.out.println("Status: " + message.getStatus()); message.setChanged(); message.notifyObservers(message.getReplyloadData()); } catch (IOException e) { System.err.println("Error sending SMS message"); e.printStackTrace(); } } public void notifyIncomingMessage(MessageConnection arg0) { // TODO: Implement thread for receiving messages } } }
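/*
 * A JDK-only sketch of the idea behind the SMSSplitTransportMessage used above (its actual
 * wire format is not shown in this file): chop the payload into chunks that fit within one
 * SMS, prefixing each chunk with "index/total|" metadata so the receiving side can reorder
 * and reassemble the parts. The 140-character budget and the metadata format are
 * illustrative assumptions.
 */
import java.util.ArrayList;
import java.util.List;

class SmsSplitSketch {

    private static final int SMS_LIMIT = 140;     // assumed per-message character budget
    private static final int HEADER_BUDGET = 8;   // room reserved for the "i/n|" prefix

    static List<String> split(final String payload) {
        final int chunkSize = SMS_LIMIT - HEADER_BUDGET;
        final int total = (payload.length() + chunkSize - 1) / chunkSize;  // ceiling division
        final List<String> parts = new ArrayList<String>();
        for (int i = 0; i < total; i++) {
            final int start = i * chunkSize;
            final int end = Math.min(start + chunkSize, payload.length());
            parts.add((i + 1) + "/" + total + "|" + payload.substring(start, end));
        }
        return parts;
    }

    public static void main(String[] args) {
        final StringBuilder payload = new StringBuilder();
        for (int i = 0; i < 300; i++) {
            payload.append('x');
        }
        final List<String> parts = split(payload.toString());
        System.out.println(parts.size() + " parts, first part starts with: "
                + parts.get(0).substring(0, 12));
    }
}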
package com.koch.ambeth.security.xml; import com.koch.ambeth.ioc.IInitializingModule; import com.koch.ambeth.ioc.annotation.FrameworkModule; import com.koch.ambeth.ioc.config.IBeanConfiguration; import com.koch.ambeth.ioc.factory.IBeanContextFactory; import com.koch.ambeth.security.privilege.transfer.TypePropertyPrivilegeOfService; import com.koch.ambeth.xml.ITypeBasedHandlerExtendable; import com.koch.ambeth.xml.ioc.XmlModule; @FrameworkModule public class SecurityXmlModule implements IInitializingModule { @Override public void afterPropertiesSet(IBeanContextFactory beanContextFactory) throws Throwable { IBeanConfiguration instantTypeHandlerBC = beanContextFactory.registerBean(TypePropertyPrivilegeOfServiceHandler.class) .parent("abstractElementHandler"); beanContextFactory.link(instantTypeHandlerBC) .to(XmlModule.CYCLIC_XML_HANDLER, ITypeBasedHandlerExtendable.class) .with(TypePropertyPrivilegeOfService.class); } }
package com.cedricziel.idea.fluid.viewHelpers; import com.cedricziel.idea.fluid.extensionPoints.ViewHelperProvider; import com.cedricziel.idea.fluid.viewHelpers.model.ViewHelper; import com.cedricziel.idea.fluid.viewHelpers.model.ViewHelperArgument; import com.intellij.lang.xml.XMLLanguage; import com.intellij.openapi.project.Project; import com.intellij.psi.PsiFileFactory; import com.intellij.psi.XmlRecursiveElementVisitor; import com.intellij.psi.xml.XmlAttribute; import com.intellij.psi.xml.XmlFile; import com.intellij.psi.xml.XmlTag; import com.intellij.psi.xml.XmlText; import gnu.trove.THashMap; import org.jetbrains.annotations.NotNull; import java.io.*; import java.util.Map; public class DefaultViewHelpersProvider implements ViewHelperProvider { private static final Map<String, Map<String, ViewHelper>> myCache = new THashMap<>(); @NotNull @Override public Map<String, ViewHelper> provideForNamespace(@NotNull Project project, @NotNull String namespace) { if (!namespace.equals("TYPO3/Fluid/ViewHelpers") && !namespace.isEmpty()) { return new THashMap<>(); } String schemaLocation = "/schemas/fluid/7.6.xsd"; return getStringViewHelperMap(project, schemaLocation); } private synchronized Map<String, ViewHelper> getStringViewHelperMap(@NotNull Project project, String schemaLocation) { if (myCache.containsKey(schemaLocation)) { return myCache.get(schemaLocation); } String schema = readSchema(schemaLocation); XmlFile xmlLanguage = (XmlFile) PsiFileFactory.getInstance(project).createFileFromText(XMLLanguage.INSTANCE, schema); ViewHelperSchemaRecursiveElementVisitor visitor = new ViewHelperSchemaRecursiveElementVisitor(); visitor.visitXmlFile(xmlLanguage); myCache.put(schemaLocation, visitor.viewHelpers); return myCache.get(schemaLocation); } private String readSchema(String schemaLocation) { InputStream resourceAsStream = DefaultViewHelpersProvider.class.getResourceAsStream(schemaLocation); String schema = ""; try { schema = readFromInputStream(resourceAsStream); } catch (IOException e) { e.printStackTrace(); } return schema; } private class ViewHelperSchemaRecursiveElementVisitor extends XmlRecursiveElementVisitor { Map<String, ViewHelper> viewHelpers = new THashMap<>(); @Override public void visitXmlTag(XmlTag tag) { if (!tag.getName().equals("xsd:element")) { super.visitXmlTag(tag); return; } XmlAttribute nameAttribute = tag.getAttribute("name"); if (nameAttribute == null || nameAttribute.getValue() == null) { super.visitXmlTag(tag); return; } ViewHelper viewHelper = new ViewHelper(nameAttribute.getValue()); viewHelper.setDocumentation(extractDocumentation(tag)); XmlTag complexType = tag.findFirstSubTag("xsd:complexType"); if (complexType != null) { XmlTag[] attributeTags = complexType.findSubTags("xsd:attribute"); for (XmlTag attributeTag : attributeTags) { String argumentName = attributeTag.getAttributeValue("name"); if (argumentName == null) { continue; } ViewHelperArgument argument = new ViewHelperArgument(argumentName); argument.setDocumentation(extractDocumentation(attributeTag)); String attributeType = attributeTag.getAttributeValue("php:type"); if (attributeType == null) { argument.setType("mixed"); } else { argument.setType(attributeType); } String requiredAttribute = attributeTag.getAttributeValue("use"); if (requiredAttribute != null && requiredAttribute.equals("required")) { argument.setRequired(true); } viewHelper.addArgument(argumentName, argument); } } viewHelpers.put(nameAttribute.getValue(), viewHelper); super.visitXmlTag(tag); } private @NotNull String 
extractDocumentation(XmlTag attributeTag) { StringBuilder attributeDocumentation = new StringBuilder(); XmlTag attributeAnnotation = attributeTag.findFirstSubTag("xsd:annotation"); if (attributeAnnotation != null) { XmlTag attributeDoc = attributeAnnotation.findFirstSubTag("xsd:documentation"); if (attributeDoc != null) { for (XmlText textElement : attributeDoc.getValue().getTextElements()) { attributeDocumentation.append(textElement.getValue()); } } } return attributeDocumentation.toString(); } } private String readFromInputStream(InputStream inputStream) throws IOException { StringBuilder resultStringBuilder = new StringBuilder(); try (BufferedReader br = new BufferedReader(new InputStreamReader(inputStream))) { String line; while ((line = br.readLine()) != null) { resultStringBuilder.append(line).append("\n"); } } finally { if (inputStream != null) { try { inputStream.close(); } catch (IOException e) { e.printStackTrace(); } } } return resultStringBuilder.toString(); } }
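/*
 * A sketch of the XSD shape the visitor above expects, expressed as a plain Java string so it
 * needs no IntelliJ test fixtures. Each xsd:element becomes a ViewHelper keyed by its name;
 * each xsd:attribute becomes a ViewHelperArgument whose type comes from "php:type" (defaulting
 * to "mixed") and whose required flag comes from use="required". The namespace URIs and the
 * documentation text below are illustrative, modeled on the parsing code above.
 */
class ViewHelperSchemaSample {

    static final String SAMPLE_SCHEMA =
            "<xsd:schema xmlns:xsd=\"http://www.w3.org/2001/XMLSchema\"\n"
          + "            xmlns:php=\"http://www.php.net/\">\n"
          + "  <xsd:element name=\"link.action\">\n"              // -> ViewHelper "link.action"
          + "    <xsd:annotation>\n"
          + "      <xsd:documentation>Renders an action link.</xsd:documentation>\n"
          + "    </xsd:annotation>\n"
          + "    <xsd:complexType>\n"
          + "      <xsd:attribute name=\"action\" php:type=\"string\" use=\"required\">\n" // -> required "action" argument of type string
          + "        <xsd:annotation>\n"
          + "          <xsd:documentation>Target action name.</xsd:documentation>\n"
          + "        </xsd:annotation>\n"
          + "      </xsd:attribute>\n"
          + "      <xsd:attribute name=\"arguments\"/>\n"         // -> optional argument, type defaults to "mixed"
          + "    </xsd:complexType>\n"
          + "  </xsd:element>\n"
          + "</xsd:schema>";
}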
package net.rhapso.koa.storage; import java.util.LinkedHashMap; import java.util.Map; public class BlockAddressable implements Addressable { private final Addressable underlying; private final BlockSize blockSize; private long position; private final Map<BlockId, Block> blocks; public BlockAddressable(final Addressable underlying, final BlockSize blockSize, final int cachedBlocks) { this.underlying = underlying; this.blockSize = blockSize; this.position = 0; this.blocks = new LinkedHashMap<BlockId, Block>() { @Override protected boolean removeEldestEntry(Map.Entry<BlockId, Block> eldest) { if (blocks.size() > cachedBlocks) { flush(eldest); return true; } return false; } }; } @Override public void seek(long pos) { this.position = pos; } @Override public void read(byte[] b) { obtainBlock().read(currentBlockOffset(), b); position += b.length; } @Override public void write(byte[] b) { obtainBlock().put(currentBlockOffset(), b); position += b.length; } @Override public int readInt() { int value = obtainBlock().readInt(currentBlockOffset()); position += 4; return value; } @Override public void writeInt(int v) { obtainBlock().putInt(currentBlockOffset(), v); position += 4; } private Block obtainBlock() { BlockId blockId = currentBlockId(); Block block = blocks.get(blockId); if (block == null) { long blockOffset = blockId.asLong() * blockSize.asLong(); byte[] bytes = new byte[blockSize.asInt()]; if (blockOffset >= underlying.length()) { underlying.seek(blockOffset); underlying.write(new byte[blockSize.asInt()]); } else { underlying.seek(blockOffset); underlying.read(bytes); } block = new Block(bytes, false); } blocks.put(blockId, block); return block; } @Override public long readLong() { long result = obtainBlock().readLong(currentBlockOffset()); position += 8; return result; } @Override public double readDouble() { double result = obtainBlock().readDouble(currentBlockOffset()); position += 8; return result; } @Override public void writeDouble(double d) { obtainBlock().putDouble(currentBlockOffset(), d); position += 8; } @Override public void writeLong(long v) { obtainBlock().putLong(currentBlockOffset(), v); position += 8; } @Override public int read() { byte b = obtainBlock().read(currentBlockOffset()); position++; return b; } @Override public void write(int aByte) { obtainBlock().put(currentBlockOffset(), (byte) aByte); position++; } private int currentBlockOffset() { return (int) (position % blockSize.asLong()); } private BlockId currentBlockId() { return new BlockId(position / blockSize.asLong()); } @Override public long length() { return underlying.length(); } @Override public Offset nextInsertionLocation(Offset currentOffset, long length) { if (length > blockSize.asInt()) { throw new IllegalArgumentException("Requested length exceeds block size"); } long relativeLocation = currentOffset.asLong() % blockSize.asLong(); if (relativeLocation + length > blockSize.asLong()) { return currentOffset.plus(blockSize.asLong() - relativeLocation); } return currentOffset; } @Override public void flush() { for (Map.Entry<BlockId, Block> entry : blocks.entrySet()) { flush(entry); } underlying.flush(); } private void flush(Map.Entry<BlockId, Block> mapEntry) { if (mapEntry.getValue().isDirty()) { underlying.seek(mapEntry.getKey().asLong() * blockSize.asLong()); underlying.write(mapEntry.getValue().bytes()); mapEntry.getValue().markClean(); } } @Override public void close() { underlying.close(); } public Offset getPosition() { return new Offset(position); } }
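/*
 * The eviction strategy used by BlockAddressable above, shown in isolation: a LinkedHashMap
 * whose removeEldestEntry(...) hook writes an entry back ("flushes" it) before letting the map
 * drop it once the configured capacity is exceeded. JDK only; the class name and the println
 * standing in for the real write-back are illustrative.
 */
import java.util.LinkedHashMap;
import java.util.Map;

class FlushingCacheSketch<K, V> extends LinkedHashMap<K, V> {

    private final int capacity;

    FlushingCacheSketch(final int capacity) {
        this.capacity = capacity;
    }

    @Override
    protected boolean removeEldestEntry(Map.Entry<K, V> eldest) {
        if (size() > capacity) {
            flush(eldest);      // persist before the entry is evicted
            return true;
        }
        return false;
    }

    private void flush(Map.Entry<K, V> entry) {
        // Stand-in for writing a dirty block back to the underlying Addressable.
        System.out.println("flushing " + entry.getKey());
    }

    public static void main(String[] args) {
        FlushingCacheSketch<Integer, String> cache = new FlushingCacheSketch<>(2);
        cache.put(1, "a");
        cache.put(2, "b");
        cache.put(3, "c");      // exceeds capacity: flushes and evicts the eldest entry (key 1)
    }
}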
package org.apache.markt.leaks.rmi; import java.lang.ref.WeakReference; import java.net.URL; import java.net.URLClassLoader; import java.rmi.registry.LocateRegistry; import java.rmi.registry.Registry; import java.rmi.server.UnicastRemoteObject; /** * Demonstrates the correct way for a web application created RMI registry to be * closed down, thereby avoiding a memory leak. * <p> * TODO: Figure out how to identify a web application created registry so it * can be shut down by the container if the web application fails to do * so. We need: * <ul> * <li>The current registry list</li> * <li>A way to determine TCCL for each registry</li> * </ul> */ public class RegistryLeak { public static void main(String[] args) { RegistryLeak registryLeak = new RegistryLeak(); // Switch TCCL registryLeak.start(); // Create RMI registry registryLeak.register(); // Clean-up registry registryLeak.deregister(); // Restore TCCL registryLeak.stop(); // Check for leaks int count = 0; while (count < 10 && registryLeak.leakCheck()) { // Trigger GC System.gc(); try { Thread.sleep(200); } catch (InterruptedException e) { e.printStackTrace(); } count++; } System.out.println("There were " + count + " calls to GC"); if (registryLeak.leakCheck()) { System.out.println("Leak"); } else { System.out.println("No leak"); } } private static final ClassLoader ORIGINAL_CLASS_LOADER = Thread.currentThread().getContextClassLoader(); private WeakReference<ClassLoader> moduleClassLoaderRef; private Registry registry; private void start() { ClassLoader moduleClassLoader = new URLClassLoader(new URL[] {}, ORIGINAL_CLASS_LOADER); Thread.currentThread().setContextClassLoader(moduleClassLoader); moduleClassLoaderRef = new WeakReference<>(moduleClassLoader); } private void register() { try { registry = LocateRegistry.createRegistry(Registry.REGISTRY_PORT); } catch (Exception e) { e.printStackTrace(); } } private void deregister() { try { UnicastRemoteObject.unexportObject(registry, false); } catch (Exception e) { e.printStackTrace(); } } private void stop() { Thread.currentThread().setContextClassLoader(ORIGINAL_CLASS_LOADER); } private boolean leakCheck() { return moduleClassLoaderRef.get() != null; } }
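/*
 * The leak-detection technique used in RegistryLeak.main(...) above, extracted into a small
 * reusable form: hold only a WeakReference to the class loader under test, then repeatedly
 * request GC and poll the reference; if it never clears, something is still pinning the
 * loader. JDK only; the helper name, retry count and sleep interval are illustrative.
 */
import java.lang.ref.WeakReference;

class LeakCheckSketch {

    // Returns true if the referent was collected within the given number of GC attempts.
    static boolean collected(final WeakReference<?> ref, final int maxAttempts) {
        for (int i = 0; i < maxAttempts && ref.get() != null; i++) {
            System.gc();                       // a request, not a guarantee
            try {
                Thread.sleep(200);
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
                break;
            }
        }
        return ref.get() == null;
    }

    public static void main(String[] args) {
        Object candidate = new Object();
        WeakReference<Object> ref = new WeakReference<>(candidate);
        candidate = null;                      // drop the only strong reference
        System.out.println(collected(ref, 10) ? "No leak" : "Leak");
    }
}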
package org.biojava.bio.symbol; import java.util.*; import java.io.*; import org.biojava.bio.*; import org.biojava.bio.seq.*; /** * An n-th order view of another SymbolList. * <P> * In practice, what this means is that you can view a DNA sequence as an * overlapping dinucleotide sequence without having to do any work yourself. * * @author Matthew Pocock */ public class OrderNSymbolList extends AbstractSymbolList implements Serializable { /** * The source sequence that we will transliterate. */ private final SymbolList source; /** * The alphabet for each overlapping tuple. */ private final Alphabet alpha; /** * The view order. */ private final int order; /** * Retrieve the underlying SymbolList being viewed. * * @return the source SymbolList */ public SymbolList getSource() { return source; } /** * Create an order n OrderNSymbolList from source. */ public OrderNSymbolList(SymbolList source, int order) throws IllegalAlphabetException { this.source = source; Alphabet a = source.getAlphabet(); this.alpha = AlphabetManager.getCrossProductAlphabet( Collections.nCopies(order, a) ); this.order = order; } public Alphabet getAlphabet() { return alpha; } public int length() { return source.length() - order + 1; } public Symbol symbolAt(int index) throws IndexOutOfBoundsException { if(index < 1 || index > length()) { throw new IndexOutOfBoundsException( "index must be within (1 .. " + length() + "), not " + index ); } try { // changed to this form to avoid constructing the sub-list objects Symbol [] syms = new Symbol[order]; for(int i = 0; i < order; i++) { syms[i] = source.symbolAt(index + i); } return alpha.getSymbol(Arrays.asList(syms)); } catch (IllegalSymbolException iae) { throw new BioError(iae, "Alphabet changed underneath me"); } } }
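/*
 * The index arithmetic behind OrderNSymbolList above, illustrated with a plain String so it
 * runs without BioJava: position i of an order-n view is the tuple of source symbols
 * (i .. i + n - 1), and the view therefore has source.length() - n + 1 positions. The helper
 * name is illustrative; the 1-based indexing mirrors the SymbolList convention.
 */
import java.util.ArrayList;
import java.util.List;

class OrderNViewSketch {

    // 1-based, like SymbolList.symbolAt(int)
    static String tupleAt(final String source, final int order, final int index) {
        final int length = source.length() - order + 1;
        if (index < 1 || index > length) {
            throw new IndexOutOfBoundsException(
                    "index must be within (1 .. " + length + "), not " + index);
        }
        return source.substring(index - 1, index - 1 + order);
    }

    public static void main(String[] args) {
        final String dna = "GATTACA";
        final List<String> dinucleotides = new ArrayList<>();
        for (int i = 1; i <= dna.length() - 2 + 1; i++) {
            dinucleotides.add(tupleAt(dna, 2, i));
        }
        System.out.println(dinucleotides);  // [GA, AT, TT, TA, AC, CA]
    }
}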
/* * CollectionStore.java - Jun 19, 2003 * * @author wolf */ package org.exist.storage.index; import org.exist.collections.Collection; import org.exist.dom.persistent.DocumentImpl; import org.exist.storage.BrokerPool; import org.exist.storage.btree.DBException; import org.exist.storage.btree.Value; import org.exist.storage.lock.ManagedLock; import org.exist.util.*; import java.io.IOException; import java.io.Writer; import java.nio.file.Path; import java.util.ArrayDeque; import java.util.Deque; import java.util.concurrent.locks.ReentrantLock; /** * Handles access to the central collection storage file (collections.dbx). * * @author wolf */ public class CollectionStore extends BFile { public static final short FILE_FORMAT_VERSION_ID = 16; public static final String FILE_NAME = "collections.dbx"; public static final String FILE_KEY_IN_CONFIG = "db-connection.collections"; public final static String FREE_DOC_ID_KEY = "__free_doc_id"; public final static String NEXT_DOC_ID_KEY = "__next_doc_id"; public final static String FREE_COLLECTION_ID_KEY = "__free_collection_id"; public final static String NEXT_COLLECTION_ID_KEY = "__next_collection_id"; public final static byte KEY_TYPE_COLLECTION = 0; public final static byte KEY_TYPE_DOCUMENT = 1; private Deque<Integer> freeResourceIds = new ArrayDeque<>(); private Deque<Integer> freeCollectionIds = new ArrayDeque<>(); /** * @param pool * @param id * @param dataDir * @param config * @throws DBException */ public CollectionStore(BrokerPool pool, byte id, Path dataDir, Configuration config) throws DBException { super(pool, id, FILE_FORMAT_VERSION_ID, true, dataDir.resolve(getFileName()), pool.getCacheManager(), 1.25, 0.03); config.setProperty(getConfigKeyForFile(), this); } public static String getFileName() { return FILE_NAME; } public static String getConfigKeyForFile() { return FILE_KEY_IN_CONFIG; } /* (non-Javadoc) * @see org.exist.storage.store.BFile#getDataSyncPeriod() */ @Override protected long getDataSyncPeriod() { return 1000; } @Override public boolean flush() throws DBException { boolean flushed = false; if (!BrokerPool.FORCE_CORRUPTION) { flushed = flushed | dataCache.flush(); flushed = flushed | super.flush(); } return flushed; } public void freeResourceId(int id) { try(final ManagedLock<ReentrantLock> bfileLock = lockManager.acquireBtreeWriteLock(getLockName())) { freeResourceIds.push(id); } catch (LockException e) { LOG.warn("Failed to acquire lock on " + FileUtils.fileName(getFile()), e); } } public int getFreeResourceId() { int freeDocId = DocumentImpl.UNKNOWN_DOCUMENT_ID; try(final ManagedLock<ReentrantLock> bfileLock = lockManager.acquireBtreeWriteLock(getLockName())) { if (!freeResourceIds.isEmpty()) { freeDocId = freeResourceIds.pop(); } } catch (final LockException e) { LOG.warn("Failed to acquire lock on " + FileUtils.fileName(getFile()), e); return DocumentImpl.UNKNOWN_DOCUMENT_ID; //TODO : rethrow ? 
-pb } return freeDocId; } public void freeCollectionId(int id) { try(final ManagedLock<ReentrantLock> bfileLock = lockManager.acquireBtreeWriteLock(getLockName())) { freeCollectionIds.push(id); } catch (LockException e) { LOG.warn("Failed to acquire lock on " + FileUtils.fileName(getFile()), e); } } public int getFreeCollectionId() { int freeCollectionId = Collection.UNKNOWN_COLLECTION_ID; try(final ManagedLock<ReentrantLock> bfileLock = lockManager.acquireBtreeWriteLock(getLockName())) { if (!freeCollectionIds.isEmpty()) { freeCollectionId = freeCollectionIds.pop(); } } catch (final LockException e) { LOG.warn("Failed to acquire lock on " + FileUtils.fileName(getFile()), e); return Collection.UNKNOWN_COLLECTION_ID; //TODO : rethrow ? -pb } return freeCollectionId; } protected void dumpValue(Writer writer, Value value) throws IOException { //TODO : what does this 5 stand for ? if (value.getLength() == 5 + Collection.LENGTH_COLLECTION_ID) { final short collectionId = ByteConversion.byteToShort(value.data(), value.start()); //TODO : what does this 1 stand for ? final int docId = ByteConversion.byteToInt(value.data(), value.start() + 1 + Collection.LENGTH_COLLECTION_ID); writer.write('['); writer.write("Document: collection = "); writer.write(collectionId); writer.write(", docId = "); writer.write(docId); writer.write(']'); } else { writer.write('['); writer.write("Collection: "); writer.write(new String(value.data(), value.start(), value.getLength(), "UTF-8")); writer.write(']'); } } public static class DocumentKey extends Value { public static final int OFFSET_TYPE = 0; public static final int LENGTH_TYPE = 1; //sizeof byte public static final int OFFSET_COLLECTION_ID = OFFSET_TYPE + LENGTH_TYPE; public static final int LENGTH_TYPE_DOCUMENT = 2; //sizeof short public static final int OFFSET_DOCUMENT_TYPE = OFFSET_COLLECTION_ID + Collection.LENGTH_COLLECTION_ID; public static final int LENGTH_DOCUMENT_TYPE = 1; //sizeof byte public static final int OFFSET_DOCUMENT_ID = OFFSET_DOCUMENT_TYPE + LENGTH_DOCUMENT_TYPE; public DocumentKey() { data = new byte[LENGTH_TYPE]; data[OFFSET_TYPE] = KEY_TYPE_DOCUMENT; len = LENGTH_TYPE; } public DocumentKey(int collectionId) { data = new byte[LENGTH_TYPE + Collection.LENGTH_COLLECTION_ID]; data[OFFSET_TYPE] = KEY_TYPE_DOCUMENT; ByteConversion.intToByte(collectionId, data, OFFSET_COLLECTION_ID); len = LENGTH_TYPE + Collection.LENGTH_COLLECTION_ID; pos = OFFSET_TYPE; } public DocumentKey(int collectionId, byte type, int docId) { data = new byte[LENGTH_TYPE + Collection.LENGTH_COLLECTION_ID + LENGTH_DOCUMENT_TYPE + DocumentImpl.LENGTH_DOCUMENT_ID]; data[OFFSET_TYPE] = KEY_TYPE_DOCUMENT; ByteConversion.intToByte(collectionId, data, OFFSET_COLLECTION_ID); data[OFFSET_DOCUMENT_TYPE] = type; ByteConversion.intToByte(docId, data, OFFSET_DOCUMENT_ID); len = LENGTH_TYPE + Collection.LENGTH_COLLECTION_ID + LENGTH_DOCUMENT_TYPE + DocumentImpl.LENGTH_DOCUMENT_ID; pos = OFFSET_TYPE; } public static int getCollectionId(Value key) { return ByteConversion.byteToInt(key.data(), key.start() + OFFSET_COLLECTION_ID); } public static int getDocumentId(Value key) { return ByteConversion.byteToInt(key.data(), key.start() + OFFSET_DOCUMENT_ID); } } public static class CollectionKey extends Value { public static final int OFFSET_TYPE = 0; public static final int LENGTH_TYPE = 1; //sizeof byte public static final int OFFSET_VALUE = OFFSET_TYPE + LENGTH_TYPE; public CollectionKey() { data = new byte[LENGTH_TYPE]; data[OFFSET_TYPE] = KEY_TYPE_COLLECTION; len = LENGTH_TYPE; } public 
CollectionKey(String name) { len = LENGTH_TYPE + UTF8.encoded(name); data = new byte[len]; data[OFFSET_TYPE] = KEY_TYPE_COLLECTION; UTF8.encode(name, data, OFFSET_VALUE); pos = OFFSET_TYPE; } } }
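/*
 * Usage sketch (illustrative, not part of the eXist sources above): shows how the
 * DocumentKey and CollectionKey classes defined in CollectionStore lay out the keys
 * stored in collections.dbx. It assumes the eXist-db classes referenced above are on
 * the classpath; the id values and the document type byte are made up for illustration.
 */
package org.exist.storage.index;

import org.exist.storage.btree.Value;

public class CollectionStoreKeyExample {

    public static void main(String[] args) {
        int collectionId = 42;      // hypothetical collection id
        int documentId = 1001;      // hypothetical document id
        byte documentType = 0;      // hypothetical document type byte

        // Full key: key type byte + collection id + document type + document id,
        // following the OFFSET_* constants declared in DocumentKey.
        Value docKey = new CollectionStore.DocumentKey(collectionId, documentType, documentId);
        System.out.println("collection id = " + CollectionStore.DocumentKey.getCollectionId(docKey));
        System.out.println("document id   = " + CollectionStore.DocumentKey.getDocumentId(docKey));

        // Shorter key: key type byte + collection id only, usable as a prefix that
        // covers every document entry belonging to one collection.
        Value docPrefix = new CollectionStore.DocumentKey(collectionId);
        System.out.println("document prefix length = " + docPrefix.getLength());

        // Collection entries use their own key type followed by the UTF-8 encoded name.
        Value collKey = new CollectionStore.CollectionKey("/db/example");
        System.out.println("collection key length  = " + collKey.getLength());
    }
}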
package org.griphyn.vdl.karajan.lib; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Set; import java.util.Stack; import org.apache.log4j.Logger; import org.globus.cog.karajan.arguments.Arg; import org.globus.cog.karajan.arguments.ArgUtil; import org.globus.cog.karajan.arguments.VariableArguments; import org.globus.cog.karajan.stack.StackFrame; import org.globus.cog.karajan.stack.VariableNotFoundException; import org.globus.cog.karajan.stack.VariableStack; import org.globus.cog.karajan.util.BoundContact; import org.globus.cog.karajan.util.ThreadingContext; import org.globus.cog.karajan.util.TypeUtil; import org.globus.cog.karajan.workflow.ExecutionException; import org.globus.cog.karajan.workflow.KarajanRuntimeException; import org.globus.cog.karajan.workflow.futures.Future; import org.globus.cog.karajan.workflow.futures.FutureIterator; import org.globus.cog.karajan.workflow.futures.FutureNotYetAvailable; import org.globus.cog.karajan.workflow.nodes.SequentialWithArguments; import org.globus.cog.karajan.workflow.nodes.restartLog.RestartLog; import org.globus.swift.catalog.TCEntry; import org.globus.swift.catalog.transformation.File; import org.globus.swift.catalog.types.TCType; import org.griphyn.vdl.karajan.TCCache; import org.griphyn.vdl.karajan.VDL2FutureException; import org.griphyn.vdl.karajan.WrapperMap; import org.griphyn.vdl.karajan.functions.ConfigProperty; import org.griphyn.vdl.mapping.AbsFile; import org.griphyn.vdl.mapping.DSHandle; import org.griphyn.vdl.mapping.DependentException; import org.griphyn.vdl.mapping.GeneralizedFileFormat; import org.griphyn.vdl.mapping.HandleOpenException; import org.griphyn.vdl.mapping.InvalidPathException; import org.griphyn.vdl.mapping.Mapper; import org.griphyn.vdl.mapping.Path; import org.griphyn.vdl.mapping.PhysicalFormat; import org.griphyn.vdl.type.Type; import org.griphyn.vdl.type.Types; import org.griphyn.vdl.util.FQN; import org.griphyn.vdl.util.VDL2ConfigProperties; public abstract class VDLFunction extends SequentialWithArguments { public static final Logger logger = Logger.getLogger(VDLFunction.class); public static final Arg.Channel ERRORS = new Arg.Channel("errors"); public static final Arg OA_PATH = new Arg.Optional("path", ""); public static final Arg PA_PATH = new Arg.Positional("path"); public static final Arg PA_VAR = new Arg.Positional("var"); public static final Arg OA_ISARRAY = new Arg.Optional("isArray", Boolean.FALSE); public final void post(VariableStack stack) throws ExecutionException { try { Object o = function(stack); if (o != null) { ret(stack, o); } super.post(stack); } catch (HandleOpenException e) { throw new FutureNotYetAvailable(VDLFunction.addFutureListener(stack, e.getSource())); } catch (DependentException e) { // This would not be the primal fault so in non-lazy errors mode it // should not matter throw new ExecutionException("Wrapping a dependent exception in VDLFunction.post() - errors in data dependencies",e); } } protected void ret(VariableStack stack, final Object value) throws ExecutionException { if (value != null) { final VariableArguments vret = ArgUtil.getVariableReturn(stack); if (value.getClass().isArray()) { if (value.getClass().getComponentType().isPrimitive()) { vret.append(value); } else { Object[] array = (Object[]) value; for (int i = 0; i < array.length; i++) { 
vret.append(array[i]); } } } else { vret.append(value); } } } protected abstract Object function(VariableStack stack) throws ExecutionException, HandleOpenException; /* * This will likely break if the engine changes in fundamental ways. It also * depends on the fact that iteration variable is named '$' in this * particular implementation. */ public static String getThreadPrefix(VariableStack stack) throws ExecutionException { stack = stack.copy(); ThreadingContext last = ThreadingContext.get(stack); Stack s = new Stack(); while (stack.frameCount() > 1) { StackFrame frame = stack.currentFrame(); if (frame.isDefined("$")) { List itv = (List) frame.getVar("$"); s.push(itv.get(0)); stack.leave(); last = ThreadingContext.get(stack); } else { ThreadingContext tc = ThreadingContext.get(stack); if (!last.equals(tc)) { s.push(String.valueOf(last.getLastID())); last = tc; } stack.leave(); } } StringBuffer sb = new StringBuffer(); while (!s.isEmpty()) { sb.append(s.pop()); if (!s.isEmpty()) { sb.append('-'); } } return sb.toString(); } // TODO - is this needed any more? its doing some type inferencing and // object creation and dequoting of strings, but the necessary behaviour // here has possibly moved elsewhere, into a more strongly typed // intermediate // XML form that removes the need for this inference. // we might need to do some casting here for the numerical stuff - eg when // asking for a float but we're given an int? not sure? might be the case // that we already have value in the Double form already, in which case // deference the internal value? // this is only used by VDL new (and really should only be used by // VDL new, and should perhaps move to the VDL new source?) protected Object internalValue(Type type, Object value) { if (Types.FLOAT.equals(type)) { return new Double(TypeUtil.toDouble(value)); } else if (Types.INT.equals(type)) { return new Double(TypeUtil.toInt(value)); } else if (Types.BOOLEAN.equals(type)) { return new Boolean(TypeUtil.toBoolean(value)); } else if (value instanceof String) { return (String) value; } else { return value; } } public static final String[] EMPTY_STRING_ARRAY = new String[0]; public static String[] filename(VariableStack stack) throws ExecutionException { DSHandle handle = (DSHandle)PA_VAR.getValue(stack); return filename(stack, handle); } public static String[] filename(VariableStack stack, DSHandle handle) throws ExecutionException { try { return filename(handle); } catch(VDL2FutureException ve) { throw new FutureNotYetAvailable(addFutureListener(stack, ve.getHandle())); } catch (HandleOpenException e) { throw new FutureNotYetAvailable(addFutureListener(stack, e.getSource())); } } public static String[] filename(DSHandle var) throws ExecutionException, HandleOpenException { try { if (var.getType().isArray()) { return leavesFileNames(var); } else if(var.getType().getFields().size() > 0) { return leavesFileNames(var); } else { return new String[] { leafFileName(var) }; } } catch (DependentException e) { return new String[0]; } } private static String[] leavesFileNames(DSHandle var) throws ExecutionException, HandleOpenException { Mapper mapper; synchronized (var.getRoot()) { mapper = var.getMapper(); } List l = new ArrayList(); Iterator i; try { Collection fp = var.getFringePaths(); List src; if (fp instanceof List) { src = (List) fp; } else { src = new ArrayList(fp); } Collections.sort(src, new PathComparator()); i = src.iterator(); while (i.hasNext()) { Path p = (Path) i.next(); l.add(leafFileName(var.getField(p), mapper)); } } catch 
(InvalidPathException e) { throw new ExecutionException("DSHandle is lying about its fringe paths"); } return (String[]) l.toArray(EMPTY_STRING_ARRAY); } private static class PathComparator implements Comparator { public int compare(Object o1, Object o2) { Path p1 = (Path) o1; Path p2 = (Path) o2; for (int i = 0; i < Math.min(p1.size(), p2.size()); i++) { int d; d = indexOrder(p1.isArrayIndex(i), p2.isArrayIndex(i)); if (d != 0) { return d; } if (p1.isArrayIndex(i)) { d = numericOrder(p1.getElement(i), p2.getElement(i)); } else { d = p1.getElement(i).compareTo(p2.getElement(i)); } if (d != 0) { return d; } } //the longer one wins return p1.size() - p2.size(); } private int indexOrder(boolean i1, boolean i2) { //it doesn't matter much what the order between indices and non-indices is, //but it needs to be consistent if (i1) { if (!i2) { return -1; } } else { if (i2) { return 1; } } return 0; } private int numericOrder(String i1, String i2) { //TODO check if we're actually dealing with numeric indices return Integer.parseInt(i1) - Integer.parseInt(i2); } } private static String leafFileName(DSHandle var) throws ExecutionException { Mapper mapper; synchronized (var.getRoot()) { mapper = var.getMapper(); } return leafFileName(var, mapper); } private static String leafFileName(DSHandle var, Mapper mapper) throws ExecutionException { if (Types.STRING.equals(var.getType())) { return relativize(String.valueOf(var.getValue())); } else { if (var.getMapper() == null) { throw new ExecutionException("Cannot invoke filename() on data without a mapper: " + var); } PhysicalFormat f = var.getMapper().map(var.getPathFromRoot()); if (f instanceof GeneralizedFileFormat) { String filename = ((GeneralizedFileFormat) f).getURIAsString(); if (filename == null) { throw new ExecutionException("Mapper did not provide a file name"); } else { return filename; } } else if (f == null) { throw new ExecutionException("Mapper failed to map " + var); } else { throw new ExecutionException("Only file formats are supported for now"); } } } protected Object pathOnly(Object f) { if (f instanceof String[]) { return pathOnly((String[]) f); } else { return pathOnly((String) f); } } protected static String pathOnly(String file) { return new AbsFile(file).getPath(); } protected String[] pathOnly(String[] files) { String[] p = new String[files.length]; for (int i = 0; i < files.length; i++) { p[i] = pathOnly(files[i]); } return p; } /** * Given an input of an array of strings, returns a single string with the * input strings separated by a space. If the 'relative' flag is set to * true, then each input string will be passed through the relativize * function. */ public String argList(String[] s, boolean relative) { StringBuffer sb = new StringBuffer(); for (int i = 0; i < s.length; i++) { if (relative) { s[i] = relativize(s[i]); } sb.append(s[i]); if (i < s.length - 1) { sb.append(' '); } } return sb.toString(); } /** * removes leading / character from a supplied filename if present, so that * the path can be used as a relative path. 
*/ public static String relativize(String name) { name = pathOnly(name); if (name != null && name.length() > 0 && name.charAt(0) == '/') { return name.substring(1); } else { return name; } } public static final String VDL_FUTURE_WRAPPER_MAP = "#vdl:futureWrapperMap"; public static WrapperMap getFutureWrapperMap(VariableStack stack) throws ExecutionException { synchronized (stack.getExecutionContext()) { WrapperMap hash = (WrapperMap) stack.firstFrame().getVar(VDL_FUTURE_WRAPPER_MAP); if (hash == null) { hash = new WrapperMap(); stack.firstFrame().setVar(VDL_FUTURE_WRAPPER_MAP, hash); //InHook.install(new Monitor(hash)); } return hash; } } protected static Map getLogData(VariableStack stack) throws ExecutionException { try { return (Map) stack.getDeepVar(RestartLog.LOG_DATA); } catch (VariableNotFoundException e) { throw new ExecutionException("No log data found. Missing restartLog()?"); } } protected boolean compatible(Type expectedType, Type actualType) { if (expectedType.equals(Types.FLOAT)) { if (actualType.equals(Types.FLOAT) || actualType.equals(Types.INT)) { return true; } else { return false; } } else if (expectedType.equals(Types.FLOAT.arrayType())) { if (actualType.equals(Types.FLOAT.arrayType()) || actualType.equals(Types.INT.arrayType())) { return true; } else { return false; } } else if (expectedType.equals(Types.ANY)) { return true; } else { return actualType.equals(expectedType); } } protected void closeChildren(VariableStack stack, DSHandle handle) throws ExecutionException, InvalidPathException { WrapperMap hash = getFutureWrapperMap(stack); // Close the future boolean closed; synchronized(handle.getRoot()) { closed = handle.isClosed(); if (!closed) { handle.closeShallow(); hash.close(handle); } } try { // Mark all leaves Iterator it = handle.getFields(Path.CHILDREN).iterator(); while (it.hasNext()) { DSHandle child = (DSHandle) it.next(); child.closeShallow(); hash.close(child); } } catch (HandleOpenException e) { throw new ExecutionException("Handle open in closeChildren",e); } if (!closed) { markToRoot(stack, handle); } } protected void closeDeep(VariableStack stack, DSHandle handle) throws ExecutionException, InvalidPathException { synchronized(handle.getRoot()) { closeDeep(stack, handle, getFutureWrapperMap(stack)); } } private void closeDeep(VariableStack stack, DSHandle handle, WrapperMap hash) throws InvalidPathException, ExecutionException { handle.closeShallow(); hash.close(handle); try { // Mark all leaves Iterator it = handle.getFields(Path.CHILDREN).iterator(); while (it.hasNext()) { closeDeep(stack, (DSHandle) it.next(), hash); } } catch (HandleOpenException e) { throw new ExecutionException("Handle open in closeChildren",e); } } private void markToRoot(VariableStack stack, DSHandle handle) throws ExecutionException { // Also mark all arrays from root Path fullPath = handle.getPathFromRoot(); DSHandle root = handle.getRoot(); synchronized(root) { for (int i = 0; i < fullPath.size(); i++) { if (fullPath.isArrayIndex(i)) { try { markAsAvailable(stack, root.getField(fullPath.subPath(0, i)), fullPath.getElement(i)); } catch (InvalidPathException e) { e.printStackTrace(); } } } } } /** Returns the DSHandle that it is passed, but ensuring that it is closed. If the handle is not closed, then execution will be deferred/retried until it is. 
*/ static public DSHandle waitFor(VariableStack stack, DSHandle handle) throws ExecutionException { synchronized(handle.getRoot()) { if (!handle.isClosed()) { throw new FutureNotYetAvailable(addFutureListener(stack, handle)); } } return handle; } protected static void closeShallow(VariableStack stack, DSHandle handle) throws ExecutionException { synchronized (handle.getRoot()) { handle.closeShallow(); getFutureWrapperMap(stack).close(handle); } } public static Future addFutureListener(VariableStack stack, DSHandle handle) throws ExecutionException { assert Thread.holdsLock(handle.getRoot()); return getFutureWrapperMap(stack).addNodeListener(handle); } protected static FutureIterator addFutureListListener(VariableStack stack, DSHandle handle, Map value) throws ExecutionException { assert Thread.holdsLock(handle.getRoot()); return getFutureWrapperMap(stack).addFutureListListener(handle, value).futureIterator(stack); } protected void markAsAvailable(VariableStack stack, DSHandle handle, Object key) throws ExecutionException { getFutureWrapperMap(stack).markAsAvailable(handle, key); } public static Path parsePath(Object o, VariableStack stack) throws ExecutionException { Path q = Path.EMPTY_PATH; Path p; if (o instanceof Path) { p = (Path) o; } else { p = Path.parse(TypeUtil.toString(o)); } for (int i = 0; i < p.size(); i++) { if (p.isArrayIndex(i)) { if (p.isWildcard(i)) { q = q.addLast(p.getElement(i), true); } else { String index = p.getElement(i); try { // check this is can parse as an integer by trying to parse and getting an exception if not Integer.parseInt(index); q = q.addLast(index, true); } catch (NumberFormatException e) { Object v = stack.getVar(index); if (v instanceof DSHandle) { v = ((DSHandle) v).getValue(); } q = q.addLast(TypeUtil.toString(v), true); } } } else { q = q.addLast(p.getElement(i)); } } if (p.hasWildcards() && !q.hasWildcards()) { throw new RuntimeException("Error in the wildcard processing routine"); } return q; } private static Set warnset = new HashSet(); protected TCEntry getTCE(TCCache tc, FQN fqn, BoundContact bc) { List l; try { l = tc.getTCEntries(fqn, bc.getHost(), TCType.INSTALLED); } catch (Exception e) { throw new KarajanRuntimeException(e); } if (l == null || l.isEmpty()) { return null; } if (l.size() > 1) { synchronized (warnset) { LinkedList wl = new LinkedList(); wl.add(fqn); wl.add(bc); if (!warnset.contains(wl)) { logger.warn("Multiple entries found for " + fqn + " on " + bc + ". 
Using the first one"); warnset.add(wl); } } } return (TCEntry) l.get(0); } public static final String TC = "vdl:TC"; public static TCCache getTC(VariableStack stack) throws ExecutionException { synchronized (stack.firstFrame()) { TCCache tc = (TCCache) stack.firstFrame().getVar(TC); if (tc == null) { String prop = ConfigProperty.getProperty(VDL2ConfigProperties.TC_FILE, stack); tc = new TCCache(File.getNonSingletonInstance(prop)); stack.firstFrame().setVar(TC, tc); } return tc; } } private static int provenanceIDCount = 451000; public static synchronized int nextProvenanceID() { return provenanceIDCount++; } public static void logProvenanceResult(int id, DSHandle result, String name) throws ExecutionException { if (logger.isDebugEnabled()) logger.debug("FUNCTION id="+id+" name="+name+" result="+result.getIdentifier()); else if (logger.isInfoEnabled()) logger.info("FUNCTION: " + name + "()"); } public static void logProvenanceParameter(int id, DSHandle parameter, String paramName) throws ExecutionException { if (logger.isDebugEnabled()) logger.debug("FUNCTIONPARAMETER id="+id+" input="+parameter.getIdentifier()+" name="+paramName); } }
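/*
 * Standalone sketch (illustrative, not part of the Swift/Karajan sources above): it
 * restates the ordering rules implemented by VDLFunction.PathComparator -- array
 * indices sort before named fields, indices compare numerically, names compare
 * lexicographically, and shorter paths win ties -- using plain string lists so it
 * runs without the org.griphyn.vdl classes. The sample paths are made up.
 */
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;

public class FringePathOrderExample {

    /** Treats elements that parse as integers as array indices. */
    static boolean isIndex(String element) {
        try {
            Integer.parseInt(element);
            return true;
        } catch (NumberFormatException e) {
            return false;
        }
    }

    static final Comparator<List<String>> ORDER = (p1, p2) -> {
        for (int i = 0; i < Math.min(p1.size(), p2.size()); i++) {
            boolean i1 = isIndex(p1.get(i));
            boolean i2 = isIndex(p2.get(i));
            if (i1 != i2) {
                return i1 ? -1 : 1;   // indices sort before field names
            }
            int d = i1
                ? Integer.parseInt(p1.get(i)) - Integer.parseInt(p2.get(i))
                : p1.get(i).compareTo(p2.get(i));
            if (d != 0) {
                return d;
            }
        }
        return p1.size() - p2.size(); // shorter path sorts first on a tie
    };

    public static void main(String[] args) {
        List<List<String>> paths = new ArrayList<>(Arrays.asList(
                Arrays.asList("out", "10"),
                Arrays.asList("out", "2"),
                Arrays.asList("log"),
                Arrays.asList("out")));
        paths.sort(ORDER);
        // Expected order: [log], [out], [out, 2], [out, 10]
        System.out.println(paths);
    }
}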
package org.apache.lenya.util; import java.util.Date; import java.util.HashMap; import java.util.SortedMap; import java.util.TreeMap; import org.apache.log4j.Category; /** * A map with a maximum capacity. When the map is full, the oldest entry is removed. * * @author <a href="mailto:andreas@apache.org"/> */ public class CacheMap extends HashMap { private static final Category log = Category.getInstance(CacheMap.class); /** * Ctor. * @param capacity The maximum number of entries. */ public CacheMap(int capacity) { assert capacity > -1; this.capacity = capacity; } private int capacity; private SortedMap timeToKey = new TreeMap(); /** * @see java.util.Map#put(Object, Object) */ public Object put(Object key, Object value) { if (capacity > 0 && size() >= capacity) { /* evict the oldest entry and drop its timestamp so the age index stays in sync with the map */ Object oldestTime = timeToKey.firstKey(); Object oldestKey = timeToKey.remove(oldestTime); remove(oldestKey); if (log.isDebugEnabled()) { log.debug("Cache full, removing oldest entry [" + oldestKey + "]"); } } timeToKey.put(new Date(), key); return super.put(key, value); } /** * @see java.util.Map#get(java.lang.Object) */ public Object get(Object key) { Object result = super.get(key); if (log.isDebugEnabled()) { if (result != null) { log.debug("Using cached object for key [" + key + "]"); } else { log.debug("No cached object for key [" + key + "]"); } } return result; } }
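/*
 * Usage sketch (illustrative, not part of the original Lenya file): shows the eviction
 * behaviour of CacheMap above. Keys and values are made up; the short sleeps only make
 * sure the Date-based age index gets distinct timestamps between inserts, and log4j is
 * assumed to be on the classpath because CacheMap logs through it.
 */
package org.apache.lenya.util;

public class CacheMapExample {

    public static void main(String[] args) throws InterruptedException {
        CacheMap cache = new CacheMap(2);

        cache.put("first", "value 1");
        Thread.sleep(5);
        cache.put("second", "value 2");
        Thread.sleep(5);

        // The map is at capacity, so this insert evicts the oldest entry.
        cache.put("third", "value 3");

        System.out.println(cache.get("first"));   // null - evicted
        System.out.println(cache.get("second"));  // value 2
        System.out.println(cache.get("third"));   // value 3
    }
}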
package org.jitsi.android.gui.chat; import java.text.*; import java.util.*; import android.app.*; import android.content.*; import android.graphics.drawable.*; import android.os.*; import android.text.*; import android.text.ClipboardManager; import android.text.method.*; import android.view.*; import android.widget.*; import android.widget.LinearLayout.*; import net.java.sip.communicator.service.contactlist.*; import net.java.sip.communicator.service.globaldisplaydetails.*; import net.java.sip.communicator.service.protocol.*; import net.java.sip.communicator.service.protocol.event.*; import net.java.sip.communicator.service.protocol.globalstatus.*; import net.java.sip.communicator.util.*; import net.java.sip.communicator.util.Logger; import org.jitsi.*; import org.jitsi.android.*; import org.jitsi.android.gui.*; import org.jitsi.android.gui.account.*; import org.jitsi.android.gui.contactlist.*; import org.jitsi.android.gui.util.*; import org.jitsi.android.gui.util.event.EventListener; import org.jitsi.service.osgi.*; /** * The <tt>ChatFragment</tt> is responsible for chat interface. * * @author Yana Stamcheva * @author Pawel Domas */ public class ChatFragment extends OSGiFragment { /** * The logger */ private static final Logger logger = Logger.getLogger(ChatFragment.class); /** * The session adapter for the contained <tt>ChatSession</tt>. */ private ChatListAdapter chatListAdapter; /** * The corresponding <tt>ChatSession</tt>. */ private ChatSession chatSession; /** * The chat list view representing the chat. */ private ListView chatListView; /** * List header used to display progress bar when history is being loaded. */ private View header; /** * Remembers first visible view to scroll the list after new portion of * history messages is added. */ public int scrollFirstVisible; /** * Remembers top position to add to the scrolling offset after new portion * of history messages is added. */ public int scrollTopOffset; /** * The chat typing view. */ private LinearLayout typingView; /** * The task that loads history. */ private LoadHistoryTask loadHistoryTask; /** * Indicates that this fragment is visible to the user. * This is important, because of PagerAdapter being used on phone layouts, * which doesn't properly call onResume() when switched page fragment is * displayed. */ private boolean visibleToUser = false; /** * The chat controller used to handle operations like editing and sending * messages used by this fragment. */ private ChatController chatController; /** * Refresh avatar and globals status display on change. */ private EventListener<PresenceStatus> globalStatusListener = new EventListener<PresenceStatus>() { @Override public void onChangeEvent(PresenceStatus eventObject) { if(chatListAdapter != null) chatListAdapter.localAvatarOrStatusChanged(); } }; /** * Returns the corresponding <tt>ChatSession</tt>. * * @return the corresponding <tt>ChatSession</tt> */ public ChatSession getChatSession() { return chatSession; } /** * Returns the underlying chat list view. * * @return the underlying chat list view */ public ListView getChatListView() { return chatListView; } /** * Returns the underlying chat list view. 
* * @return the underlying chat list view */ public ChatListAdapter getChatListAdapter() { return chatListAdapter; } /** * {@inheritDoc} */ @Override public View onCreateView( LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { View content = inflater.inflate( R.layout.chat_conversation, container, false); chatListAdapter = new ChatListAdapter(); chatListView = (ListView) content.findViewById(R.id.chatListView); // Inflates and adds the header, hidden by default this.header = inflater.inflate(R.layout.progressbar, chatListView, false); header.setVisibility(View.GONE); chatListView.addHeaderView(header); // Registers for chat message context menu registerForContextMenu(chatListView); typingView = (LinearLayout) content.findViewById(R.id.typingView); chatListView.setAdapter(chatListAdapter); chatListView.setSelector(R.drawable.contact_list_selector); chatListView.setOnScrollListener(new AbsListView.OnScrollListener() { @Override public void onScrollStateChanged(AbsListView view, int scrollState) { // Detects event when user scrolls to the top of the list if(scrollState == 0) { if(chatListView.getChildAt(0).getTop()==0) { // Loads some more history // if there's no loading task in progress if(loadHistoryTask == null) { loadHistoryTask = new LoadHistoryTask(); loadHistoryTask.execute(); } } } } @Override public void onScroll(AbsListView view, int firstVisibleItem, int visibleItemCount, int totalItemCount) { // Remembers scrolling position to restore after new history // messages are loaded scrollFirstVisible = firstVisibleItem; View firstVisible = view.getChildAt(0); scrollTopOffset = firstVisible != null ? firstVisible.getTop() : 0; } }); // Chat intent handling Bundle arguments = getArguments(); String chatId = arguments.getString(ChatSessionManager.CHAT_IDENTIFIER); if(chatId == null) throw new IllegalArgumentException(); chatSession = ChatSessionManager.getActiveChat(chatId); return content; } /** * {@inheritDoc} */ @Override public void onAttach(Activity activity) { super.onAttach(activity); this.chatController = new ChatController(activity, this); } /** * This method must be called by parent <tt>Activity</tt> or * <tt>Fragment</tt> in order to register the chat controller. * * @param isVisible <tt>true</tt> if the fragment is now visible to * the user. * @see ChatController */ public void setVisibleToUser(boolean isVisible) { logger.debug("View visible to user: " + hashCode()+" "+isVisible); this.visibleToUser = isVisible; checkInitController(); } /** * Checks for <tt>ChatController</tt> initialization. To init the controller * fragment must be visible and it's View must be created. * * If fragment is no longer visible the controller will be uninitialized. */ private void checkInitController() { if(visibleToUser && chatListView != null) { logger.debug("Init controller: " + hashCode()); chatController.onShow(); // Also register global status listener AndroidGUIActivator.getLoginRenderer() .addGlobalStatusListener(globalStatusListener); } else if(!visibleToUser) { chatController.onHide(); // Also remove global status listener AndroidGUIActivator.getLoginRenderer() .removeGlobalStatusListener(globalStatusListener); } else { logger.debug("Skipping controller init... " + hashCode()); } } /** * Initializes the chat list adapter. 
*/ private void initAdapter() { loadHistoryTask = new LoadHistoryTask(); loadHistoryTask.execute(); } @Override public void onResume() { super.onResume(); initAdapter(); // If added to the pager adapter for the first time it is required // to check again, because it's marked visible when the Views // are not created yet checkInitController(); chatSession.addMessageListener(chatListAdapter); chatSession.addContactStatusListener(chatListAdapter); chatSession.addTypingListener(chatListAdapter); } @Override public void onPause() { chatSession.removeMessageListener(chatListAdapter); chatSession.removeContactStatusListener(chatListAdapter); chatSession.removeTypingListener(chatListAdapter); /* * Indicates that this fragment is no longer visible, * because of this call parent <tt>Activities don't have to call it * in onPause(). */ setVisibleToUser(false); super.onPause(); } /** * {@inheritDoc} */ @Override public void onCreateContextMenu(ContextMenu menu, View v, ContextMenu.ContextMenuInfo menuInfo) { super.onCreateContextMenu(menu, v,menuInfo); // Creates chat message context menu getActivity().getMenuInflater().inflate(R.menu.chat_msg_ctx_menu, menu); } /** * {@inheritDoc} */ @Override public boolean onContextItemSelected(MenuItem item) { if(item.getItemId() == R.id.copy_to_clipboard) { AdapterView.AdapterContextMenuInfo info = (AdapterView.AdapterContextMenuInfo) item.getMenuInfo(); // Clicked position must be aligned to list headers count int position = info.position - chatListView.getHeaderViewsCount(); // Gets clicked message ChatMessage clickedMsg = chatListAdapter.getMessage(position); // Copy message content to clipboard ClipboardManager clipboardManager = (ClipboardManager) getActivity() .getSystemService(Context.CLIPBOARD_SERVICE); clipboardManager.setText(clickedMsg.getContentForClipboard()); return true; } return super.onContextItemSelected(item); } /** * Creates new parametrized instance of <tt>CallContactFragment</tt>. * * @param chatId optional phone number that will be filled. * @return new parametrized instance of <tt>CallContactFragment</tt>. */ public static ChatFragment newInstance(String chatId) { if (logger.isDebugEnabled()) logger.debug("CHAT FRAGMENT NEW INSTANCE: " + chatId); ChatFragment chatFragment = new ChatFragment(); Bundle args = new Bundle(); args.putString(ChatSessionManager.CHAT_IDENTIFIER, chatId); chatFragment.setArguments(args); return chatFragment; } public void onDetach() { if (logger.isDebugEnabled()) logger.debug("DETACH CHAT FRAGMENT: " + this); super.onDetach(); chatListAdapter = null; if (loadHistoryTask != null) { loadHistoryTask.cancel(true); loadHistoryTask = null; } } class ChatListAdapter extends BaseAdapter implements ChatSession.ChatSessionListener, ContactPresenceStatusListener, TypingNotificationsListener { /** * The list of chat message displays. */ private final List<MessageDisplay> messages = new ArrayList<MessageDisplay>(); /** * The type of the incoming message view. */ final int INCOMING_MESSAGE_VIEW = 0; /** * The type of the outgoing message view. */ final int OUTGOING_MESSAGE_VIEW = 1; /** * The type of the system message view. */ final int SYSTEM_MESSAGE_VIEW = 2; /** * The type of the error message view. */ final int ERROR_MESSAGE_VIEW = 3; /** * Counter used to generate row ids. */ private long idGenerator=0; /** * HTML image getter. 
*/ private final Html.ImageGetter imageGetter = new HtmlImageGetter(); /** * Passes the message to the contained <code>ChatConversationPanel</code> * for processing and appends it at the end of the conversationPanel * document. * */ public void addMessage( ChatMessage newMessage, boolean update) { synchronized (messages) { int lastMsgIdx = getLastMessageIdx(newMessage); ChatMessage lastMsg = lastMsgIdx != -1 ? chatListAdapter.getMessage(lastMsgIdx) : null; if(lastMsg == null || !lastMsg.isConsecutiveMessage(newMessage)) { messages.add(new MessageDisplay(newMessage)); } else { // Merge the message and update the object in the list messages.get(lastMsgIdx) .update(lastMsg.mergeMessage(newMessage)); } } if(update) { runOnUiThread(new Runnable() { @Override public void run() { chatListAdapter.notifyDataSetChanged(); // List must be scrolled manually, when // android:transcriptMode="normal" is set chatListView.setSelection(chatListAdapter.getCount()); } }); } } /** * Inserts given <tt>Collection</tt> of <tt>ChatMessage</tt> at * the beginning of the list. * * @param collection the collection of <tt>ChatMessage</tt> to prepend. */ public void prependMessages(Collection<ChatMessage> collection) { List<MessageDisplay> newMsgs = new ArrayList<MessageDisplay>(); Iterator<ChatMessage> iterator = collection.iterator(); MessageDisplay previous = null; while(iterator.hasNext()) { ChatMessage next = iterator.next(); if(previous == null || !previous.msg.isConsecutiveMessage(next)) { previous = new MessageDisplay(next); newMsgs.add(previous); } else { // Merge the message and update the object in the list previous.update(previous.msg.mergeMessage(next)); } } messages.addAll(0, newMsgs); } /** * Finds index of the message that will handle <tt>newMessage</tt> * merging process(usually just the last one). If the * <tt>newMessage</tt> is a correction message, then the last message * of the same type will be returned. * * @param newMessage the next message to be merged into the adapter. * * @return index of the message that will handle <tt>newMessage</tt> * merging process. If <tt>newMessage</tt> is a correction message, * then the last message of the same type will be returned. 
*/ private int getLastMessageIdx(ChatMessage newMessage) { // If it's not a correction message then just return the last one if(newMessage.getCorrectedMessageUID() == null) return chatListAdapter.getCount()-1; // Search for the same type int msgType = newMessage.getMessageType(); for(int i=getCount()-1; i>= 0; i--) { ChatMessage candidate = getMessage(i); if(candidate.getMessageType() == msgType) { return i; } } return -1; } /** * {@inheritDoc} */ public int getCount() { synchronized (messages) { return messages.size(); } } /** * {@inheritDoc} */ public Object getItem(int position) { synchronized (messages) { if (logger.isDebugEnabled()) logger.debug("OBTAIN CHAT ITEM ON POSITION: " + position); return messages.get(position); } } ChatMessage getMessage(int pos) { return ((MessageDisplay) getItem(pos)).msg; } MessageDisplay getMessageDisplay(int pos) { return (MessageDisplay) getItem(pos); } /** * {@inheritDoc} */ public long getItemId(int pos) { return messages.get(pos).id; } public int getViewTypeCount() { return 4; } public int getItemViewType(int position) { ChatMessage message = getMessage(position); int messageType = message.getMessageType(); if (messageType == ChatMessage.INCOMING_MESSAGE) return INCOMING_MESSAGE_VIEW; else if (messageType == ChatMessage.OUTGOING_MESSAGE) return OUTGOING_MESSAGE_VIEW; else if (messageType == ChatMessage.SYSTEM_MESSAGE) return SYSTEM_MESSAGE_VIEW; else if(messageType == ChatMessage.ERROR_MESSAGE) return ERROR_MESSAGE_VIEW; return 0; } /** * {@inheritDoc} */ public View getView(int position, View convertView, ViewGroup parent) { // Keeps reference to avoid future findViewById() MessageViewHolder messageViewHolder; if (convertView == null) { LayoutInflater inflater = getActivity().getLayoutInflater(); messageViewHolder = new MessageViewHolder(); int viewType = getItemViewType(position); messageViewHolder.viewType = viewType; if (viewType == INCOMING_MESSAGE_VIEW) { convertView = inflater.inflate( R.layout.chat_incoming_row, parent, false); messageViewHolder.avatarView = (ImageView) convertView.findViewById( R.id.incomingAvatarIcon); messageViewHolder.statusView = (ImageView) convertView.findViewById( R.id.incomingStatusIcon); messageViewHolder.messageView = (TextView) convertView.findViewById( R.id.incomingMessageView); messageViewHolder.timeView = (TextView) convertView.findViewById( R.id.incomingTimeView); messageViewHolder.typingView = (ImageView) convertView.findViewById( R.id.typingImageView); } else if(viewType == OUTGOING_MESSAGE_VIEW) { convertView = inflater.inflate( R.layout.chat_outgoing_row, parent, false); messageViewHolder.avatarView = (ImageView) convertView.findViewById( R.id.outgoingAvatarIcon); messageViewHolder.statusView = (ImageView) convertView.findViewById( R.id.outgoingStatusIcon); messageViewHolder.messageView = (TextView) convertView.findViewById( R.id.outgoingMessageView); messageViewHolder.timeView = (TextView) convertView.findViewById( R.id.outgoingTimeView); } else { // System or error view convertView = inflater.inflate( viewType == SYSTEM_MESSAGE_VIEW ?
R.layout.chat_system_row : R.layout.chat_error_row, parent, false); messageViewHolder.messageView = (TextView) convertView.findViewById( R.id.messageView); } convertView.setTag(messageViewHolder); } else { messageViewHolder = (MessageViewHolder) convertView.getTag(); } MessageDisplay message = getMessageDisplay(position); if (message != null) { if(messageViewHolder.viewType == INCOMING_MESSAGE_VIEW || messageViewHolder.viewType == OUTGOING_MESSAGE_VIEW) { updateStatusAndAvatarView(messageViewHolder); messageViewHolder.timeView.setText(message.getDateStr()); } messageViewHolder.messageView.setText(message.getBody()); // Html links are handled only for system messages, which is // currently used for displaying OTR authentication dialog. // Otherwise settings movement method prevent form firing // on item clicked events. int currentMsgType = message.msg.getMessageType(); if(messageViewHolder.msgType != currentMsgType) { MovementMethod movementMethod = null; if(currentMsgType == ChatMessage.SYSTEM_MESSAGE) { movementMethod = LinkMovementMethod.getInstance(); } messageViewHolder .messageView .setMovementMethod(movementMethod); } } return convertView; } /** * Updates status and avatar views on given <tt>MessageViewHolder</tt>. * @param viewHolder the <tt>MessageViewHolder</tt> to update. */ private void updateStatusAndAvatarView(MessageViewHolder viewHolder) { Drawable avatar = null; Drawable status = null; if (viewHolder.viewType == INCOMING_MESSAGE_VIEW) { avatar = ContactListAdapter.getAvatarDrawable( chatSession.getMetaContact()); status = ContactListAdapter.getStatusDrawable( chatSession.getMetaContact()); } else if (viewHolder.viewType == OUTGOING_MESSAGE_VIEW) { AndroidLoginRenderer loginRenderer = AndroidGUIActivator.getLoginRenderer(); avatar = loginRenderer.getLocalAvatarDrawable(); status = loginRenderer.getLocalStatusDrawable(); } setAvatar(viewHolder.avatarView, avatar); setStatus(viewHolder.statusView, status); } @Override public void messageDelivered(final MessageDeliveredEvent evt) { final Contact contact = evt.getDestinationContact(); final MetaContact metaContact = AndroidGUIActivator.getContactListService() .findMetaContactByContact(contact); if (logger.isTraceEnabled()) logger.trace("MESSAGE DELIVERED to contact: " + contact.getAddress()); if (metaContact != null && chatSession.getMetaContact().equals(metaContact)) { final ChatMessageImpl msg = ChatMessageImpl.getMsgForEvent(evt); if (logger.isTraceEnabled()) logger.trace( "MESSAGE DELIVERED: process message to chat for contact: " + contact.getAddress() + " MESSAGE: " + msg.getMessage()); addMessage(msg, true); } } @Override public void messageDeliveryFailed(MessageDeliveryFailedEvent arg0) { // Do nothing, handled in ChatSession } @Override public void messageReceived(final MessageReceivedEvent evt) { if (logger.isTraceEnabled()) logger.trace("MESSAGE RECEIVED from contact: " + evt.getSourceContact().getAddress()); final Contact protocolContact = evt.getSourceContact(); final MetaContact metaContact = AndroidGUIActivator.getContactListService() .findMetaContactByContact(protocolContact); if(metaContact != null && chatSession.getMetaContact().equals(metaContact)) { final ChatMessageImpl msg = ChatMessageImpl.getMsgForEvent(evt); addMessage(msg, true); } else { if (logger.isTraceEnabled()) logger.trace("MetaContact not found for protocol contact: " + protocolContact + "."); } } @Override public void messageAdded(ChatMessage msg) { addMessage(msg, true); } /** * Indicates a contact has changed its status. 
*/ @Override public void contactPresenceStatusChanged( ContactPresenceStatusChangeEvent evt) { Contact sourceContact = evt.getSourceContact(); if (logger.isDebugEnabled()) logger.debug("Contact presence status changed: " + sourceContact.getAddress()); if (!chatSession.getMetaContact().containsContact(sourceContact)) return; new UpdateStatusTask().execute(); } @Override public void typingNotificationDeliveryFailed( TypingNotificationEvent evt) { } @Override public void typingNotificationReceived(TypingNotificationEvent evt) { if (logger.isDebugEnabled()) logger.debug("Typing notification received: " + evt.getSourceContact().getAddress()); TypingNotificationHandler .handleTypingNotificationReceived(evt, ChatFragment.this); } /** * Removes all messages from the adapter */ public void removeAllMessages() { messages.clear(); } /** * Updates all avatar and status on outgoing messages rows. */ public void localAvatarOrStatusChanged() { runOnUiThread(new Runnable() { @Override public void run() { for(int i=0; i<chatListView.getChildCount(); i++) { View row = chatListView.getChildAt(i); updateStatusAndAvatarView( (MessageViewHolder) row.getTag()); } } }); } /** * Class used to cache processed message contents. Prevents from * re-processing on each View display. */ class MessageDisplay { /** * Row identifier. */ private final long id; /** * Displayed <tt>ChatMessage</tt> */ private ChatMessage msg; /** * Date string cache */ private String dateStr; /** * Message body cache */ private Spanned body; /** * Creates new instance of <tt>MessageDisplay</tt> that will be used * for displaying given <tt>ChatMessage</tt>. * * @param msg the <tt>ChatMessage</tt> that will be displayed by * this instance. */ MessageDisplay(ChatMessage msg) { this.msg = msg; this.id = idGenerator++; } /** * Returns formatted date string for the <tt>ChatMessage</tt>. * @return formatted date string for the <tt>ChatMessage</tt>. */ public String getDateStr() { if(dateStr == null) { Date date = msg.getDate(); dateStr = GuiUtils.formatDate(date) + " " + GuiUtils.formatTime(date); } return dateStr; } /** * Returns <tt>Spanned</tt> message body processed for HTML tags. * @return <tt>Spanned</tt> message body. */ public Spanned getBody() { if(body == null) { body = Html.fromHtml(msg.getMessage(), imageGetter, null); } return body; } /** * Updates this display instance with new message causing display * contents to be invalidated. * @param chatMessage new message content */ public void update(ChatMessage chatMessage) { dateStr = null; body = null; msg = chatMessage; } } } static class MessageViewHolder { ImageView avatarView; ImageView statusView; ImageView typeIndicator; TextView messageView; TextView timeView; ImageView typingView; int viewType; int position; int msgType; } /** * Loads the history in an asynchronous thread and then adds the history * messages to the user interface. */ private class LoadHistoryTask extends AsyncTask<Void, Void, Collection<ChatMessage>> { /** * Remembers adapter size before new messages were added. */ private int preSize; @Override protected void onPreExecute() { super.onPreExecute(); header.setVisibility(View.VISIBLE); this.preSize = chatListAdapter.getCount(); } @Override protected Collection<ChatMessage> doInBackground(Void... 
params) { return chatSession.getHistory(preSize == 0); } @Override protected void onPostExecute(Collection<ChatMessage> result) { super.onPostExecute(result); chatListAdapter.prependMessages(result); header.setVisibility(View.GONE); chatListAdapter.notifyDataSetChanged(); int loaded = chatListAdapter.getCount() - preSize; int scrollTo = loaded + scrollFirstVisible; chatListView.setSelectionFromTop(scrollTo, scrollTopOffset); loadHistoryTask = null; } } /** * Updates the status user interface. */ private class UpdateStatusTask extends AsyncTask<Void, Void, Void> { protected Void doInBackground(Void... params) { return null; } @Override protected void onPostExecute(Void result) { super.onPostExecute(result); for (int i = 0; i < chatListView.getChildCount(); i++) { View chatRowView = chatListView.getChildAt(i); MessageViewHolder viewHolder = (MessageViewHolder) chatRowView.getTag(); if (viewHolder != null && viewHolder.viewType == chatListAdapter.INCOMING_MESSAGE_VIEW) { Drawable status = ContactListAdapter .getStatusDrawable( chatSession.getMetaContact()); ImageView statusView = viewHolder.statusView; setStatus(statusView, status); } } } } /** * Returns the local user avatar drawable. * * @return the local user avatar drawable */ private static Drawable getLocalAvatarDrawable() { GlobalDisplayDetailsService displayDetailsService = AndroidGUIActivator.getGlobalDisplayDetailsService(); byte[] avatarImage = displayDetailsService.getGlobalDisplayAvatar(); if (avatarImage != null) return AndroidImageUtil.drawableFromBytes(avatarImage); return null; } /** * Returns the local user status drawable. * * @return the local user status drawable */ private static Drawable getLocalStatusDrawable() { GlobalStatusService globalStatusService = AndroidGUIActivator.getGlobalStatusService(); byte[] statusImage = StatusUtil.getContactStatusIcon( globalStatusService.getGlobalPresenceStatus()); return AndroidImageUtil.drawableFromBytes(statusImage); } /** * Sets the avatar icon for the given avatar view. * * @param avatarView the avatar image view * @param avatarDrawable the avatar drawable to set */ public void setAvatar( ImageView avatarView, Drawable avatarDrawable) { if (avatarDrawable == null) { avatarDrawable = JitsiApplication.getAppResources() .getDrawable(R.drawable.avatar); } avatarView.setImageDrawable(avatarDrawable); } /** * Sets the status of the given view. * * @param statusView the status icon view * @param statusDrawable the status drawable */ public void setStatus( ImageView statusView, Drawable statusDrawable) { statusView.setImageDrawable(statusDrawable); } /** * Sets the appropriate typing notification interface. 
* * @param typingState the typing state that should be represented in the * view */ public void setTypingState(int typingState) { if (typingView == null) return; TextView typingTextView = (TextView) typingView.findViewById(R.id.typingTextView); ImageView typingImgView = (ImageView) typingView.findViewById(R.id.typingImageView); boolean setVisible = false; if (typingState == OperationSetTypingNotifications.STATE_TYPING) { Drawable typingDrawable = typingImgView.getDrawable(); if (!(typingDrawable instanceof AnimationDrawable)) { typingImgView.setImageResource(R.drawable.typing_drawable); typingDrawable = typingImgView.getDrawable(); } if(!((AnimationDrawable) typingDrawable).isRunning()) { AnimationDrawable animatedDrawable = (AnimationDrawable) typingDrawable; animatedDrawable.setOneShot(false); animatedDrawable.start(); } typingTextView.setText(chatSession.getShortDisplayName() + " " + getResources() .getString(R.string.service_gui_CONTACT_TYPING)); setVisible = true; } else if (typingState == OperationSetTypingNotifications.STATE_PAUSED) { typingImgView.setImageResource(R.drawable.typing1); typingTextView.setText( chatSession.getShortDisplayName() + " " + getResources() .getString(R.string.service_gui_CONTACT_PAUSED_TYPING)); setVisible = true; } if (setVisible) { typingImgView.getLayoutParams().height = LayoutParams.WRAP_CONTENT; typingImgView.setPadding(7, 0, 7, 7); typingView.setVisibility(View.VISIBLE); } else typingView.setVisibility(View.INVISIBLE); } }
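/*
 * Standalone sketch (illustrative, not part of the Jitsi sources above): it restates
 * the consecutive-message merging pattern used by ChatListAdapter.addMessage() and
 * prependMessages() with plain strings instead of ChatMessage objects, so it runs
 * without Android. The "same sender within two minutes" rule is only a stand-in for
 * ChatMessage.isConsecutiveMessage(), whose real criteria are not shown above.
 */
import java.util.ArrayList;
import java.util.List;

public class ConsecutiveMessageMergeExample {

    static class Msg {
        final String sender;
        final long timeMillis;
        String body;

        Msg(String sender, long timeMillis, String body) {
            this.sender = sender;
            this.timeMillis = timeMillis;
            this.body = body;
        }

        /** Stand-in for ChatMessage.isConsecutiveMessage(). */
        boolean isConsecutive(Msg next) {
            return sender.equals(next.sender)
                    && next.timeMillis - timeMillis < 2 * 60 * 1000;
        }

        /** Stand-in for ChatMessage.mergeMessage(): append the new body. */
        void merge(Msg next) {
            body = body + "\n" + next.body;
        }
    }

    /** Collapses runs of consecutive messages into single rows, as the adapter does. */
    static List<Msg> collapse(List<Msg> history) {
        List<Msg> rows = new ArrayList<>();
        Msg previous = null;
        for (Msg next : history) {
            if (previous == null || !previous.isConsecutive(next)) {
                previous = next;
                rows.add(previous);
            } else {
                previous.merge(next);
            }
        }
        return rows;
    }

    public static void main(String[] args) {
        List<Msg> history = new ArrayList<>();
        history.add(new Msg("alice", 0, "hi"));
        history.add(new Msg("alice", 30_000, "are you there?"));
        history.add(new Msg("bob", 40_000, "yes"));

        List<Msg> rows = collapse(history);
        System.out.println(rows.size() + " rows");  // 2 rows
        System.out.println(rows.get(0).body);       // alice's two messages merged
    }
}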
package org.apache.xmlrpc; import java.io.*; import java.util.*; import java.lang.reflect.*; /** * A multithreaded, reusable XML-RPC server object. The name may be misleading * because this does not open any server sockets. Instead it is fed by passing * an XML-RPC input stream to the execute method. If you want to open a * HTTP listener, use the WebServer class instead. * * @author <a href="mailto:hannes@apache.org">Hannes Wallnoefer</a> * @author <a href="mailto:dlr@finemaltcoding.com">Daniel Rall</a> */ public class XmlRpcServer { private Hashtable handlers; private Stack pool; private int workers; /** * Construct a new XML-RPC server. You have to register handlers * to make it do something useful. */ public XmlRpcServer() { handlers = new Hashtable(); pool = new Stack(); workers = 0; } /** * Register a handler object with this name. Methods of this * objects will be callable over XML-RPC as * "handlername.methodname". For more information about XML-RPC * handlers see the <a href="../index.html#1a">main documentation * page</a>. * * @param handlername The name to identify the handler by. * @param handler The handler itself. */ public void addHandler(String handlername, Object handler) { if (handler instanceof XmlRpcHandler || handler instanceof AuthenticatedXmlRpcHandler) { handlers.put(handlername, handler); } else if (handler != null) { handlers.put(handlername, new Invoker(handler)); } } /** * Remove a handler object that was previously registered with * this server. * * @param handlername The name identifying the handler to remove. */ public void removeHandler(String handlername) { handlers.remove(handlername); } /** * Parse the request and execute the handler method, if one is * found. Returns the result as XML. The calling Java code * doesn't need to know whether the call was successful or not * since this is all packed into the response. */ public byte[] execute(InputStream is) { return execute(is, null, null); } /** * Parse the request and execute the handler method, if one is * found. If the invoked handler is AuthenticatedXmlRpcHandler, * use the credentials to authenticate the user. */ public byte[] execute(InputStream is, String user, String password) { Worker worker = getWorker(); byte[] retval = worker.execute(is, user, password); pool.push(worker); return retval; } private final Worker getWorker() { try { return(Worker) pool.pop(); } catch(EmptyStackException x) { int maxThreads = XmlRpc.getMaxThreads(); if (workers < maxThreads) { workers += 1; if (XmlRpc.debug && workers >= maxThreads * .95) { System.err.println("95% of XML-RPC server threads in use"); } return new Worker(); } throw new RuntimeException("System overload"); } } /** * Performs streaming, parsing, and handler execution. * Implementation is not thread-safe. */ class Worker extends XmlRpc { private static final byte[] EMPTY_BYTE_ARRAY = new byte[0]; private Vector inParams; private ByteArrayOutputStream buffer; private XmlWriter writer; /** * Creates a new instance. */ protected Worker() { inParams = new Vector(); buffer = new ByteArrayOutputStream(); } /** * Given a request for the server, generates a response.
*/ public byte[] execute(InputStream is, String user, String password) { try { // Do the work return executeInternal(is, user, password); } finally { // Release most of our resources buffer.reset(); inParams.removeAllElements(); } } private byte[] executeInternal(InputStream is, String user, String password) { byte[] result; long now = 0; if (XmlRpc.debug) { now = System.currentTimeMillis(); } try { parse(is); if (XmlRpc.debug) { System.err.println("method name: " + methodName); System.err.println("inparams: " + inParams); } // check for errors from the XML parser if (errorLevel > NONE) { throw new Exception(errorMsg); } Object handler = null; String handlerName = null; int dot = methodName.lastIndexOf('.'); if (dot > -1) { handlerName = methodName.substring(0, dot); handler = handlers.get(handlerName); if (handler != null) { methodName = methodName.substring(dot + 1); } } if (handler == null) { handler = handlers.get("$default"); } if (handler == null) { if (dot > -1) { throw new Exception("RPC handler object \""+ handlerName + "\" not found and no default handler registered."); } else { throw new Exception("RPC handler object not found for \""+ methodName + "\": no default handler registered."); } } Object outParam; if (handler instanceof AuthenticatedXmlRpcHandler) { outParam =((AuthenticatedXmlRpcHandler) handler). execute(methodName, inParams, user, password); } else { outParam =((XmlRpcHandler) handler).execute( methodName, inParams); } if (XmlRpc.debug) { System.err.println("outparam = "+outParam); } writer = new XmlWriter(buffer); writeResponse(outParam, writer); writer.flush(); result = buffer.toByteArray(); } catch(Exception x) { if (XmlRpc.debug) { x.printStackTrace(); } // Ensure that if there is anything in the buffer, it // is cleared before continuing with the writing of exceptions. // It is possible that something is in the buffer // if there were an exception during the writeResponse() // call above. buffer.reset(); writer = null; try { writer = new XmlWriter(buffer); } catch(UnsupportedEncodingException encx) { System.err.println("XmlRpcServer attempted to use " + "unsupported encoding: " + encx); // NOTE: If we weren't already using the default // encoding, we could try it here. } catch(IOException iox) { System.err.println("XmlRpcServer experienced I/O error " + "writing error response: " + iox); } String message = x.toString(); // Retrieve XmlRpcException error code(if possible). int code = x instanceof XmlRpcException ? ((XmlRpcException) x).code : 0; try { writeError(code, message, writer); writer.flush(); } catch(Exception e) { // Unlikely to occur, as we just sent a struct // with an int and a string. System.err.println("Unable to send error response to " + "client: " + e); } // If we were able to create a XmlWriter, we should // have a response. if (writer != null) { result = buffer.toByteArray(); } else { result = EMPTY_BYTE_ARRAY; } } finally { if (writer != null) { try { writer.close(); } catch(IOException iox) { // This is non-fatal, but worth logging a // warning for. System.err.println("Exception closing output stream: " + iox); } } } if (XmlRpc.debug) { System.err.println("Spent "+ (System.currentTimeMillis() - now) + " millis in request"); } return result; } /** * Called when an object to be added to the argument list has * been parsed. */ void objectParsed(Object what) { inParams.addElement(what); } /** * Writes an XML-RPC response to the XML writer. 
*/ void writeResponse(Object param, XmlWriter writer) throws XmlRpcException, IOException { writer.startElement("methodResponse"); // if (param == null) param = ""; // workaround for Frontier bug writer.startElement("params"); writer.startElement("param"); writer.writeObject(param); writer.endElement("param"); writer.endElement("params"); writer.endElement("methodResponse"); } /** * Writes an XML-RPC error response to the XML writer. */ void writeError(int code, String message, XmlWriter writer) throws XmlRpcException, IOException { // System.err.println("error: "+message); Hashtable h = new Hashtable(); h.put("faultCode", new Integer(code)); h.put("faultString", message); writer.startElement("methodResponse"); writer.startElement("fault"); writer.writeObject(h); writer.endElement("fault"); writer.endElement("methodResponse"); } } // end of inner class Worker } // XmlRpcServer /** * Introspects handlers using Java Reflection to call methods matching * a XML-RPC call. */ class Invoker implements XmlRpcHandler { private Object invokeTarget; private Class targetClass; private static Class OBJECT_CLASS; static { try { OBJECT_CLASS = Class.forName("java.lang.Object"); } catch (ClassNotFoundException e) { throw new Error(e.toString()); } } public Invoker(Object target) { invokeTarget = target; targetClass = (invokeTarget instanceof Class) ? (Class) invokeTarget : invokeTarget.getClass(); if (XmlRpc.debug) { System.err.println("Target object is " + targetClass); } } // main method: look up the requested method on the target object and invoke it if found. public Object execute(String methodName, Vector params) throws Exception { // build an array of argument classes and an array of argument values Class[] argClasses = null; Object[] argValues = null; if (params != null) { argClasses = new Class[params.size()]; argValues = new Object[params.size()]; for (int i = 0; i < params.size(); i++) { argValues[i] = params.elementAt(i); if (argValues[i] instanceof Integer) { argClasses[i] = Integer.TYPE; } else if (argValues[i] instanceof Double) { argClasses[i] = Double.TYPE; } else if (argValues[i] instanceof Boolean) { argClasses[i] = Boolean.TYPE; } else { argClasses[i] = argValues[i].getClass(); } } } // is the requested method there? Method method = null; if (XmlRpc.debug) { System.err.println("Searching for method: " + methodName); for (int i = 0; i < argClasses.length; i++) System.err.println("Parameter " + i + ": " + argClasses[i] + " = " + argValues[i]); } try { method = targetClass.getMethod(methodName, argClasses); } // if it is not there, rethrow the corresponding exception catch(NoSuchMethodException nsm_e) { throw nsm_e; } catch(SecurityException s_e) { throw s_e; } // our policy is to make all public methods callable except the ones defined in java.lang.Object if (method.getDeclaringClass() == OBJECT_CLASS) { throw new XmlRpcException(0, "Invoker can't call methods defined in java.lang.Object"); } // invoke Object returnValue = null; try { returnValue = method.invoke(invokeTarget, argValues); } catch(IllegalAccessException iacc_e) { throw iacc_e; } catch(IllegalArgumentException iarg_e) { throw iarg_e; } catch(InvocationTargetException it_e) { if (XmlRpc.debug) { it_e.getTargetException().printStackTrace(); } // check whether the thrown exception is XmlRpcException Throwable t = it_e.getTargetException(); if (t instanceof XmlRpcException) { throw (XmlRpcException) t; } // It is some other exception throw new Exception(t.toString()); } return returnValue; } }
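/*
 * Usage sketch (illustrative, not part of the original Apache file): feeds a
 * hand-built XML-RPC request to the socketless XmlRpcServer above. The "calc"
 * handler name and the Calculator class are made up; it assumes the surrounding
 * xmlrpc classes and a usable SAX driver are on the classpath.
 */
package org.apache.xmlrpc;

import java.io.ByteArrayInputStream;

public class XmlRpcServerExample {

    /** Plain object whose public methods become callable as "calc.<method>" via Invoker. */
    public static class Calculator {
        public int add(int a, int b) {
            return a + b;
        }
    }

    public static void main(String[] args) throws Exception {
        XmlRpcServer server = new XmlRpcServer();
        server.addHandler("calc", new Calculator());

        String request =
            "<?xml version=\"1.0\"?>"
            + "<methodCall>"
            + "<methodName>calc.add</methodName>"
            + "<params>"
            + "<param><value><int>2</int></value></param>"
            + "<param><value><int>3</int></value></param>"
            + "</params>"
            + "</methodCall>";

        // execute() parses the stream, dispatches to Calculator.add through the
        // reflective Invoker, and returns the XML response (or a fault) as bytes.
        byte[] response = server.execute(
                new ByteArrayInputStream(request.getBytes("UTF-8")));
        System.out.println(new String(response, "UTF-8"));
    }
}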
package org.opencms.ui.editors; import org.opencms.file.CmsObject; import org.opencms.file.CmsResource; import org.opencms.file.types.I_CmsResourceType; import org.opencms.i18n.CmsEncoder; import org.opencms.main.CmsException; import org.opencms.main.CmsLog; import org.opencms.main.OpenCms; import org.opencms.ui.A_CmsUI; import org.opencms.ui.CmsVaadinUtils; import org.opencms.ui.apps.I_CmsAppUIContext; import org.opencms.ui.apps.Messages; import org.opencms.ui.components.CmsBrowserFrame; import org.opencms.ui.components.CmsConfirmationDialog; import java.io.UnsupportedEncodingException; import java.net.URLEncoder; import java.util.Map; import java.util.Map.Entry; import org.apache.commons.logging.Log; import com.vaadin.navigator.Navigator; import com.vaadin.navigator.ViewChangeListener; import com.vaadin.server.ExternalResource; /** * Class to be extended by frame based editors.<p> */ public abstract class A_CmsFrameEditor implements I_CmsEditor, ViewChangeListener { /** Log instance for this class. */ private static final Log LOG = CmsLog.getLog(A_CmsFrameEditor.class); /** The serial version id. */ private static final long serialVersionUID = 6944345583913510988L; /** The editor state. */ protected CmsEditorStateExtension m_editorState; /** Flag indicating a view change. */ boolean m_leaving; /** The currently edited resource. */ CmsResource m_resource; /** The frame component. */ private CmsBrowserFrame m_frame; /** * @see com.vaadin.navigator.ViewChangeListener#afterViewChange(com.vaadin.navigator.ViewChangeListener.ViewChangeEvent) */ public void afterViewChange(ViewChangeEvent event) { // nothing to do } /** * @see com.vaadin.navigator.ViewChangeListener#beforeViewChange(com.vaadin.navigator.ViewChangeListener.ViewChangeEvent) */ public boolean beforeViewChange(final ViewChangeEvent event) { if (!m_leaving && m_editorState.hasChanges()) { final String target = event.getViewName(); CmsConfirmationDialog.show( CmsVaadinUtils.getMessageText(Messages.GUI_EDITOR_CLOSE_CAPTION_0), CmsVaadinUtils.getMessageText(Messages.GUI_EDITOR_CLOSE_TEXT_0), new Runnable() { public void run() { leaveEditor(event.getNavigator(), target); } }); return false; } else if (!m_leaving) { tryUnlock(); } return true; } /** * @see org.opencms.ui.editors.I_CmsEditor#initUI(org.opencms.ui.apps.I_CmsAppUIContext, org.opencms.file.CmsResource, java.lang.String, java.util.Map) */ public void initUI(I_CmsAppUIContext context, CmsResource resource, String backLink, Map<String, String> params) { m_resource = resource; CmsObject cms = A_CmsUI.getCmsObject(); String sitepath = m_resource != null ?
cms.getSitePath(m_resource) : ""; String link = OpenCms.getLinkManager().substituteLinkForRootPath(cms, getEditorUri()); m_frame = new CmsBrowserFrame(); m_frame.setDescription("Editor"); m_frame.setName("edit"); try { backLink = URLEncoder.encode(backLink, CmsEncoder.ENCODING_UTF_8); } catch (UnsupportedEncodingException e1) { LOG.error(e1.getLocalizedMessage(), e1); } link += "?resource=" + sitepath + "&backlink=" + backLink; if (params != null) { for (Entry<String, String> entry : params.entrySet()) { try { link += "&" + entry.getKey() + "=" + URLEncoder.encode(entry.getValue(), CmsEncoder.ENCODING_UTF_8); } catch (UnsupportedEncodingException e) { LOG.error(e.getLocalizedMessage(), e); } } } m_frame.setSource(new ExternalResource(link)); m_frame.setSizeFull(); context.showInfoArea(false); context.hideToolbar(); m_frame.addStyleName("o-editor-frame"); context.setAppContent(m_frame); context.setAppTitle( CmsVaadinUtils.getMessageText( Messages.GUI_CONTENT_EDITOR_TITLE_2, resource == null ? "new resource" : resource.getName(), CmsResource.getParentFolder(sitepath))); m_editorState = new CmsEditorStateExtension(m_frame); } /** * @see org.opencms.ui.editors.I_CmsEditor#matchesResource(org.opencms.file.CmsResource, boolean) */ public boolean matchesResource(CmsResource resource, boolean plainText) { I_CmsResourceType type = OpenCms.getResourceManager().getResourceType(resource); return matchesType(type, plainText); } /** * Returns the editor URI.<p> * * @return the editor URI */ protected abstract String getEditorUri(); /** * Leaves the editor view.<p> * * @param navigator the navigator instance * @param target the target view */ void leaveEditor(Navigator navigator, String target) { m_leaving = true; tryUnlock(); navigator.navigateTo(target); } /** * Tries to unlock the current resource.<p> */ private void tryUnlock() { if (m_resource != null) { try { A_CmsUI.getCmsObject().unlockResource(m_resource); } catch (CmsException e) { LOG.debug("Unlocking resource " + m_resource.getRootPath() + " failed", e); } } } }
package org.apache.xmlrpc; import java.io.*; import java.util.*; import java.lang.reflect.*; /** * A multithreaded, reusable XML-RPC server object. The name may be misleading * because this does not open any server sockets. Instead it is fed by passing * an XML-RPC input stream to the execute method. If you want to open a * HTTP listener, use the WebServer class instead. * * @author <a href="mailto:hannes@apache.org">Hannes Wallnoefer</a> * @author <a href="mailto:dlr@finemaltcoding.com">Daniel Rall</a> */ public class XmlRpcServer { private Hashtable handlers; private Stack pool; private int workers; /** * Construct a new XML-RPC server. You have to register handlers * to make it do something useful. */ public XmlRpcServer() { handlers = new Hashtable(); pool = new Stack(); workers = 0; } /** * Register a handler object with this name. Methods of this * objects will be callable over XML-RPC as * "handlername.methodname". For more information about XML-RPC * handlers see the <a href="../index.html#1a">main documentation * page</a>. * * @param handlername The name to identify the handler by. * @param handler The handler itself. */ public void addHandler(String handlername, Object handler) { if (handler instanceof XmlRpcHandler || handler instanceof AuthenticatedXmlRpcHandler) { handlers.put(handlername, handler); } else if (handler != null) { handlers.put(handlername, new Invoker(handler)); } } /** * Remove a handler object that was previously registered with * this server. * * @param handlername The name identifying the handler to remove. */ public void removeHandler(String handlername) { handlers.remove(handlername); } /** * Parse the request and execute the handler method, if one is * found. Returns the result as XML. The calling Java code * doesn't need to know whether the call was successful or not * since this is all packed into the response. */ public byte[] execute(InputStream is) { return execute(is, null, null); } /** * Parse the request and execute the handler method, if one is * found. If the invoked handler is AuthenticatedXmlRpcHandler, * use the credentials to authenticate the user. */ public byte[] execute(InputStream is, String user, String password) { Worker worker = getWorker(); byte[] retval = worker.execute(is, user, password); pool.push(worker); return retval; } private final Worker getWorker() { try { return(Worker) pool.pop(); } catch(EmptyStackException x) { int maxThreads = XmlRpc.getMaxThreads(); if (workers < maxThreads) { workers += 1; if (maxThreads - workers >= maxThreads * .95) { System.err.println("95% of XML-RPC server threads in use"); } return new Worker(); } throw new RuntimeException("System overload"); } } /** * Performs streaming, parsing, and handler execution. * Implementation is not thread-safe. */ class Worker extends XmlRpc { private static final byte[] EMPTY_BYTE_ARRAY = new byte[0]; private Vector inParams; private ByteArrayOutputStream buffer; private XmlWriter writer; /** * Creates a new instance. */ protected Worker() { inParams = new Vector(); buffer = new ByteArrayOutputStream(); } /** * Given a request for the server, generates a response. 
*/ public byte[] execute(InputStream is, String user, String password) { try { // Do the work return executeInternal(is, user, password); } finally { // Release most of our resources buffer.reset(); inParams.removeAllElements(); } } private byte[] executeInternal(InputStream is, String user, String password) { byte[] result; long now = 0; if (XmlRpc.debug) { now = System.currentTimeMillis(); } try { parse(is); if (XmlRpc.debug) { System.err.println("method name: " + methodName); System.err.println("inparams: " + inParams); } // check for errors from the XML parser if (errorLevel > NONE) { throw new Exception(errorMsg); } Object handler = null; String handlerName = null; int dot = methodName.lastIndexOf('.'); if (dot > -1) { handlerName = methodName.substring(0, dot); handler = handlers.get(handlerName); if (handler != null) { methodName = methodName.substring(dot + 1); } } if (handler == null) { handler = handlers.get("$default"); } if (handler == null) { if (dot > -1) { throw new Exception("RPC handler object \""+ handlerName + "\" not found and no default handler registered."); } else { throw new Exception("RPC handler object not found for \""+ methodName + "\": no default handler registered."); } } Object outParam; if (handler instanceof AuthenticatedXmlRpcHandler) { outParam =((AuthenticatedXmlRpcHandler) handler). execute(methodName, inParams, user, password); } else { outParam =((XmlRpcHandler) handler).execute( methodName, inParams); } if (XmlRpc.debug) { System.err.println("outparam = "+outParam); } writer = new XmlWriter(buffer); writeResponse(outParam, writer); writer.flush(); result = buffer.toByteArray(); } catch(Exception x) { if (XmlRpc.debug) { x.printStackTrace(); } // Ensure that if there is anything in the buffer, it // is cleared before continuing with the writing of exceptions. // It is possible that something is in the buffer // if there were an exception during the writeResponse() // call above. buffer.reset(); writer = null; try { writer = new XmlWriter(buffer); } catch(UnsupportedEncodingException encx) { System.err.println("XmlRpcServer attempted to use " + "unsupported encoding: " + encx); // NOTE: If we weren't already using the default // encoding, we could try it here. } catch(IOException iox) { System.err.println("XmlRpcServer experienced I/O error " + "writing error response: " + iox); } String message = x.toString(); // Retrieve XmlRpcException error code(if possible). int code = x instanceof XmlRpcException ? ((XmlRpcException) x).code : 0; try { writeError(code, message, writer); writer.flush(); } catch(Exception e) { // Unlikely to occur, as we just sent a struct // with an int and a string. System.err.println("Unable to send error response to " + "client: " + e); } // If we were able to create a XmlWriter, we should // have a response. if (writer != null) { result = buffer.toByteArray(); } else { result = EMPTY_BYTE_ARRAY; } } finally { if (writer != null) { try { writer.close(); } catch(IOException iox) { // This is non-fatal, but worth logging a // warning for. System.err.println("Exception closing output stream: " + iox); } } } if (XmlRpc.debug) { System.err.println("Spent "+ (System.currentTimeMillis() - now) + " millis in request"); } return result; } /** * Called when an object to be added to the argument list has * been parsed. */ void objectParsed(Object what) { inParams.addElement(what); } /** * Writes an XML-RPC response to the XML writer. 
*/ void writeResponse(Object param, XmlWriter writer) throws XmlRpcException, IOException { writer.startElement("methodResponse"); // if (param == null) param = ""; // workaround for Frontier bug writer.startElement("params"); writer.startElement("param"); writer.writeObject(param); writer.endElement("param"); writer.endElement("params"); writer.endElement("methodResponse"); } /** * Writes an XML-RPC error response to the XML writer. */ void writeError(int code, String message, XmlWriter writer) throws XmlRpcException, IOException { // System.err.println("error: "+message); Hashtable h = new Hashtable(); h.put("faultCode", new Integer(code)); h.put("faultString", message); writer.startElement("methodResponse"); writer.startElement("fault"); writer.writeObject(h); writer.endElement("fault"); writer.endElement("methodResponse"); } } // end of inner class Worker } // XmlRpcServer /** * Introspects handlers using Java Reflection to call methods matching * a XML-RPC call. */ class Invoker implements XmlRpcHandler { private Object invokeTarget; private Class targetClass; public Invoker(Object target) { invokeTarget = target; targetClass = (invokeTarget instanceof Class) ? (Class) invokeTarget : invokeTarget.getClass(); if (XmlRpc.debug) { System.err.println("Target object is " + targetClass); } } // Main entry point: look up the named method on the target object and, if found, invoke it. public Object execute(String methodName, Vector params) throws Exception { // Build an array of parameter class types and an array of parameter values Class[] argClasses = null; Object[] argValues = null; if (params != null) { argClasses = new Class[params.size()]; argValues = new Object[params.size()]; for (int i = 0; i < params.size(); i++) { argValues[i] = params.elementAt(i); if (argValues[i] instanceof Integer) { argClasses[i] = Integer.TYPE; } else if (argValues[i] instanceof Double) { argClasses[i] = Double.TYPE; } else if (argValues[i] instanceof Boolean) { argClasses[i] = Boolean.TYPE; } else { argClasses[i] = argValues[i].getClass(); } } } // Does the requested method exist? Method method = null; if (XmlRpc.debug) { System.err.println("Searching for method: " + methodName); if (argClasses != null) { for (int i = 0; i < argClasses.length; i++) System.err.println("Parameter " + i + ": " + argClasses[i] + " = " + argValues[i]); } } try { method = targetClass.getMethod(methodName, argClasses); } // If the method is not found, rethrow the corresponding exception catch(NoSuchMethodException nsm_e) { throw nsm_e; } catch(SecurityException s_e) { throw s_e; } // Our policy is to make all public methods callable except // the ones defined in java.lang.Object. if (method.getDeclaringClass() == Object.class) { throw new XmlRpcException(0, "Invoker can't call methods " + "defined in java.lang.Object"); } // invoke Object returnValue = null; try { returnValue = method.invoke(invokeTarget, argValues); } catch(IllegalAccessException iacc_e) { throw iacc_e; } catch(IllegalArgumentException iarg_e) { throw iarg_e; } catch(InvocationTargetException it_e) { if (XmlRpc.debug) { it_e.getTargetException().printStackTrace(); } // check whether the thrown exception is XmlRpcException Throwable t = it_e.getTargetException(); if (t instanceof XmlRpcException) { throw (XmlRpcException) t; } // It is some other exception throw new Exception(t.toString()); } return returnValue; } }
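// Illustrative usage sketch (not part of the library): shows how the XmlRpcServer above is
// typically driven. A plain handler object is registered under a name, a request read from an
// InputStream is executed, and the returned byte[] is the complete XML response. The handler
// class and the hand-written request below are hypothetical examples.
package org.apache.xmlrpc;

import java.io.ByteArrayInputStream;
import java.io.InputStream;

public class XmlRpcServerUsageExample {

    /** A plain Java object; its public methods become callable as "math.<methodname>". */
    public static class MathHandler {
        public int add(int a, int b) {
            return a + b;
        }
    }

    public static void main(String[] args) throws Exception {
        XmlRpcServer server = new XmlRpcServer();
        // Non-XmlRpcHandler objects are wrapped in an Invoker automatically (see addHandler above)
        server.addHandler("math", new MathHandler());

        // A hand-written request invoking math.add(2, 3); normally this arrives over HTTP
        String request = "<?xml version=\"1.0\"?>"
                + "<methodCall><methodName>math.add</methodName>"
                + "<params><param><value><int>2</int></value></param>"
                + "<param><value><int>3</int></value></param></params>"
                + "</methodCall>";

        InputStream in = new ByteArrayInputStream(request.getBytes("UTF-8"));
        byte[] response = server.execute(in);
        System.out.println(new String(response, "UTF-8"));
    }
}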
package org.osjava.norbert; import java.util.ArrayList; import java.util.Iterator; import java.util.List; /** * Contains a series of Rules. It then runs a path against these * to decide if it is allowed or not. */ // TODO: Make this package private? class RulesEngine { private List rules; public RulesEngine() { this.rules = new ArrayList(); } public void allowPath(String path) { add( new AllowedRule(path) ); } public void disallowPath(String path) { add( new DisallowedRule(path) ); } public void add(Rule rule) { this.rules.add(rule); } /** * Run each Rule in series on the path. * If a Rule returns a Boolean, return that. * If null is returned, move on. * When no more rules are left, return true. */ public boolean isAllowed(String path) { Iterator iterator = this.rules.iterator(); while(iterator.hasNext()) { Rule rule = (Rule)iterator.next(); Boolean test = rule.isAllowed(path); if(test != null) { return test.booleanValue(); } } return true; } public boolean isEmpty() { return this.rules.isEmpty(); } public String toString() { return "RulesEngine: " + this.rules; } }
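// Illustrative usage sketch (not part of the library): demonstrates the evaluation order of the
// RulesEngine above -- rules are checked in insertion order, the first non-null answer wins, and
// an unmatched path defaults to allowed. AllowedRule and DisallowedRule are assumed to behave as
// simple path-prefix rules, as is usual for robots.txt handling; the paths are made up.
package org.osjava.norbert;

public class RulesEngineUsageExample {

    public static void main(String[] args) {
        RulesEngine engine = new RulesEngine();

        // The more specific allow rule is added first so it is consulted before the broad disallow
        engine.allowPath("/private/public-report.html");
        engine.disallowPath("/private/");

        System.out.println(engine.isAllowed("/private/public-report.html")); // expected: true
        System.out.println(engine.isAllowed("/private/secret.html"));        // expected: false
        System.out.println(engine.isAllowed("/index.html"));                 // expected: true (no rule matched)
    }
}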
package eg.ui; import java.awt.Color; import java.awt.BorderLayout; import java.awt.Font; import java.awt.print.*; import java.awt.event.FocusAdapter; import java.awt.event.FocusEvent; import java.text.MessageFormat; import javax.swing.JTextPane; import javax.swing.JPanel; import javax.swing.JScrollPane; import javax.swing.JScrollBar; import javax.swing.KeyStroke; import javax.swing.text.DefaultCaret; import javax.swing.border.LineBorder; import javax.swing.border.MatteBorder; //--Eadgyth--// import eg.Constants; import eg.utils.FileUtils; import eg.utils.ScreenParams; /** * Defines the panel that contains the text area for editing text and the * area that displays line numbers. * <p> * The usual shortcuts for cut, copy, paste and select text are disabled */ public final class EditArea { private final static LineBorder WHITE_BORDER = new LineBorder(Color.WHITE, 5); private final JPanel content = new JPanel(new BorderLayout()); private final JTextPane textArea = new JTextPane(); private final JTextPane lineNrArea = new JTextPane(); private final JPanel disabledWordwrapPnl = new JPanel(new BorderLayout()); private final JPanel enabledWordwrapPnl = new JPanel(); private final JScrollPane wordwrapScoll = new JScrollPane( JScrollPane.VERTICAL_SCROLLBAR_AS_NEEDED, JScrollPane.HORIZONTAL_SCROLLBAR_AS_NEEDED); private final JScrollPane noWordwrapScroll = new JScrollPane( JScrollPane.VERTICAL_SCROLLBAR_AS_NEEDED, JScrollPane.HORIZONTAL_SCROLLBAR_AS_NEEDED); private final JScrollPane linkedLineNrScroll = new JScrollPane( JScrollPane.VERTICAL_SCROLLBAR_AS_NEEDED, JScrollPane.HORIZONTAL_SCROLLBAR_AS_NEEDED); private final JScrollPane lineNrScroll = new JScrollPane( JScrollPane.VERTICAL_SCROLLBAR_NEVER, JScrollPane.HORIZONTAL_SCROLLBAR_NEVER); private boolean isWordwrap; private int scrollPos; /** * @param isWordwrap the boolean that is true to enable, false to * disable wordwrap * @param isLineNumbers the boolean that is true to show, false to * hide line numbers. This is effectless if isWordwrap is true. * @param font the name of the initial font * @param fontSize the size of the initial font */ public EditArea(boolean isWordwrap, boolean isLineNumbers, String font, int fontSize) { removeShortCuts(); initEditAreaPnl(); initTextArea(); initLineNrArea(); initLinkedLineNrScrolling(); initWordwrapScrolling(); initNoWordwrapScrolling(); setFont(font, fontSize); if (isWordwrap) { enableWordwrap(); } else { disableWordwrap(isLineNumbers); } textArea.addFocusListener(new FocusAdapter() { @Override public void focusLost(FocusEvent e) { textArea.getCaret().setSelectionVisible(true); } }); } /** * Gets this JPanel which contains the text area for editing * text and the text area that shows line numbers * * @return the JPanel */ public JPanel content() { return content; } /** * Gets this text area that displays the editable text * * @return the text area */ public JTextPane textArea() { return textArea; } /** * Gets this text area that displays the line numbers * * @return the text area */ public JTextPane lineNrArea() { return lineNrArea; } /** * Gets this implemented method that is specified in * <code>LineNrWidthAdaptable</code> * * @return the implemented method */ public LineNrWidthAdaptable lineNrWidth() { return (i, j) -> adaptLineNrWidth(i, j); } /** * Enables wordwrap. 
Invoking this method also hides the area that * displays line numbers */ public void enableWordwrap() { enableWordwrapImpl(); } /** * Disables wordwrap and makes the area that displays line numbers * visible if the specified boolean <code>lineNumbers</code> is true * * @param lineNumbers the boolean value */ public void disableWordwrap(boolean lineNumbers) { if (lineNumbers) { showLineNumbersImpl(); } else { hideLineNumbersImpl(); } } /** * Returns the boolane that true if wordwrap is enabled * * @return the boolean value */ public boolean isWordwrap() { return isWordwrap; } public void showLineNumbers(boolean b) { if (isWordwrap) { throw new IllegalStateException( "Wordwrap is currently enabled"); } if (b) { showLineNumbersImpl(); } else { hideLineNumbersImpl(); } } /** * Sets the font * * @param font the name of the font * @param fontSize the font size */ public void setFont(String font, int fontSize) { Font fontNew = new Font(font, Font.PLAIN, ScreenParams.scaledSize(fontSize)); lineNrArea.setFont(fontNew); textArea.setFont(fontNew); revalidate(); } /** * Prints the text content in this text area to a printer */ public void print() { try { MessageFormat footerFormat = new MessageFormat("Page {0}"); textArea.print(null, footerFormat, true, null, null, false); } catch (PrinterException e) { FileUtils.logStack(e); } } //--private--/ private void showLineNumbersImpl() { removeCenterComponent(); disabledWordwrapPnl.add(textArea, BorderLayout.CENTER); linkedLineNrScroll.setViewportView(disabledWordwrapPnl); content.add(lineNrScroll, BorderLayout.WEST); content.add(linkedLineNrScroll, BorderLayout.CENTER); setScrollPos(linkedLineNrScroll); textArea.requestFocusInWindow(); revalidate(); isWordwrap = false; } private void hideLineNumbersImpl() { content.remove(lineNrScroll); removeCenterComponent(); disabledWordwrapPnl.add(textArea, BorderLayout.CENTER); noWordwrapScroll.setViewportView(disabledWordwrapPnl); content.add(noWordwrapScroll, BorderLayout.CENTER); setScrollPos(noWordwrapScroll); textArea.requestFocusInWindow(); revalidate(); isWordwrap = false; } private void enableWordwrapImpl() { content.remove(lineNrScroll); removeCenterComponent(); wordwrapScoll.setViewportView(textArea); content.add(wordwrapScoll, BorderLayout.CENTER); setScrollPos(wordwrapScoll); textArea.requestFocusInWindow(); revalidate(); isWordwrap = true; } private void adaptLineNrWidth(int prevLineNr, int lineNr) { if ((int) Math.log10(prevLineNr) - (int) Math.log10(lineNr) != 0) { revalidate(); } } private void revalidate() { content.revalidate(); content.repaint(); } private void removeCenterComponent() { BorderLayout layout = (BorderLayout) content.getLayout(); JScrollPane c = (JScrollPane) layout.getLayoutComponent(BorderLayout.CENTER); if (c != null) { scrollPos = c.getVerticalScrollBar().getValue(); content.remove(c); } } private void setScrollPos(JScrollPane pane) { JScrollBar bar = pane.getVerticalScrollBar(); bar.setValue(scrollPos); } private void initEditAreaPnl() { content.setBorder(Constants.GRAY_BORDER); } private void initTextArea() { textArea.setBorder(WHITE_BORDER); } private void initLineNrArea() { lineNrArea.setBorder(WHITE_BORDER); lineNrArea.setEditable(false); lineNrArea.setFocusable(false); DefaultCaret caret = (DefaultCaret) lineNrArea.getCaret(); caret.setUpdatePolicy(DefaultCaret.NEVER_UPDATE); } private void initWordwrapScrolling() { wordwrapScoll.getVerticalScrollBar().setUnitIncrement(15); wordwrapScoll.setBorder(null); wordwrapScoll.setViewportView(enabledWordwrapPnl); } private void 
initNoWordwrapScrolling() { noWordwrapScroll.getVerticalScrollBar().setUnitIncrement(15); noWordwrapScroll.setBorder(null); noWordwrapScroll.setViewportView(disabledWordwrapPnl); } private void initLinkedLineNrScrolling() { linkedLineNrScroll.getVerticalScrollBar().setUnitIncrement(15); linkedLineNrScroll.setBorder(null); linkedLineNrScroll.setViewportView(disabledWordwrapPnl); lineNrScroll.setViewportView(lineNrArea); lineNrScroll.setBorder(null); lineNrScroll.setBorder(new MatteBorder(0, 0, 0, 1, Constants.GRAY)); lineNrScroll.getVerticalScrollBar().setModel (linkedLineNrScroll.getVerticalScrollBar().getModel()); } private void removeShortCuts() { KeyStroke ksSelAll = KeyStroke.getKeyStroke("control pressed a"); textArea.getInputMap().put(ksSelAll, "dummy"); KeyStroke ksCut = KeyStroke.getKeyStroke("control pressed X"); textArea.getInputMap().put(ksCut, "dummy"); KeyStroke ksCopy = KeyStroke.getKeyStroke("control pressed C"); textArea.getInputMap().put(ksCopy, "dummy"); KeyStroke ksPaste = KeyStroke.getKeyStroke("control pressed V"); textArea.getInputMap().put(ksPaste, "dummy"); } }
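// Illustrative usage sketch (not part of the original sources): embeds the EditArea component
// above in a plain Swing frame. The frame title, font and window size are arbitrary choices;
// the other Eadgyth classes imported by EditArea are expected to be on the classpath.
package eg.ui;

import java.awt.EventQueue;
import javax.swing.JFrame;

public class EditAreaUsageExample {

    public static void main(String[] args) {
        EventQueue.invokeLater(() -> {
            // Word wrap disabled, line numbers shown, 12 pt monospaced font
            EditArea editArea = new EditArea(false, true, "Monospaced", 12);

            JFrame frame = new JFrame("EditArea demo");
            frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
            frame.add(editArea.content());
            frame.setSize(600, 400);
            frame.setVisible(true);

            // Enabling word wrap later would hide the line number area, as documented above:
            // editArea.enableWordwrap();
        });
    }
}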
package element; import interaction.Actions; import interaction.Deplacements; import interfaceGraphique.VueElement; import java.rmi.RemoteException; import java.util.Hashtable; import utilitaires.Calculs; public class Maitre extends Personnage { private static final long serialVersionUID = 1L; public Maitre(String nom) { super(nom, 2147483647, 2147483647); } public void strategie(VueElement ve, Hashtable<Integer,VueElement> voisins, Integer refRMI) throws RemoteException { Actions actions = new Actions(ve, voisins); // retrieve the neighbours (distance < 10) Deplacements deplacements = new Deplacements(ve,voisins); if (0 == voisins.size()) { // no neighbours: wander parler("J'erre...", ve); deplacements.seDirigerVers(0); // wander } else { VueElement cible = Calculs.chercherElementProche(ve, voisins); int distPlusProche = Calculs.distanceChebyshev(ve.getPoint(), cible.getPoint()); int refPlusProche = cible.getRef(); Element elemPlusProche = cible.getControleur().getElement(); // on the same team? boolean memeEquipe = false; if(elemPlusProche instanceof Personnage) { memeEquipe = (getLeader() != -1 && getLeader() == ((Personnage) elemPlusProche).getLeader()) || // same leader getLeader() == refPlusProche || // the target is this character's leader ((Personnage) elemPlusProche).getLeader() == refRMI; // this character is the target's leader } if(distPlusProche <= 2) { // if close enough if(elemPlusProche instanceof Potion) { // potion // stats are already maxed, so do not pick up the potion parler("J'erre...", ve); deplacements.seDirigerVers(0); // wander } else { // character if(!memeEquipe) { // duel only if not on the same team (no coup d'etat possible in that case) // duel parler("Je fais un duel avec " + refPlusProche, ve); actions.interaction(refRMI, refPlusProche, ve.getControleur().getArene()); } else { parler("J'erre...", ve); deplacements.seDirigerVers(0); // wander } } } else { // there are neighbours, but further away if(!memeEquipe) { // potion or enemy // move towards the closest one parler("Je vais vers mon voisin " + refPlusProche, ve); deplacements.seDirigerVers(refPlusProche); } else { parler("J'erre...", ve); deplacements.seDirigerVers(0); // wander } } } } }
package pitt.search.semanticvectors; import java.lang.Integer; import java.lang.Math; import java.util.ArrayList; import java.util.LinkedList; import java.util.Enumeration; import java.util.Random; import pitt.search.semanticvectors.ObjectVector; /** * This class provides standard vector methods, e.g., cosine measure, * normalization, tensor utils. */ public class VectorUtils{ static void printVector(float[] vector) { for (int i = 0; i < vector.length - 1; ++i) { System.out.print(vector[i] + "|"); } // Print last coordinate followed by newline, not "|". System.out.println(vector[vector.length - 1]); } /** * Check whether a vector is all zeros. */ static final float kTolerance = 0.0001f; public static boolean isZeroVector(float[] vec) { for (int i = 0; i < vec.length; ++i) { if (Math.abs(vec[i]) > kTolerance) { return false; } } return true; } public static boolean isZeroTensor(float[][] ten) { for (int i = 0; i < ten.length; ++i) { if (!isZeroVector(ten[i])) { return false; } } return true; } public static float[][] createZeroTensor(int dim) { float[][] newTensor = new float[dim][dim]; for (int i = 0; i < dim; ++i) { for (int j = 0; j < dim; ++j) { newTensor[i][j] = 0; } } return newTensor; } /** * Returns the scalar product (dot product) of two vectors * for normalized vectors this is the same as cosine similarity. * @param vec1 First vector. * @param vec2 Second vector. */ public static float scalarProduct(float[] vec1, float[] vec2){ float result = 0; for (int i = 0; i < vec1.length; ++i) { result += vec1[i] * vec2[i]; } return result; } /* Euclidean distance metric */ public static float euclideanDistance(float[] vec1, float[] vec2){ float distance=0; for (int i = 0; i < vec1.length; ++i) { distance += (vec1[i] - vec2[i]) * (vec1[i] - vec2[i]); } return (float)Math.sqrt(distance); } /** * Get nearest vector from list of candidates. * @param vector The vector whose nearest neighbor is to be found. * @param candidates The list of vectors from whoe the nearest is to be chosen. * @return Integer value referencing the position in the candidate list of the nearest vector. */ public static int getNearestVector(float[] vector, float[][] candidates) { int nearest = 0; float minDist = euclideanDistance(vector, candidates[0]); float thisDist = minDist; for (int i = 1; i < candidates.length; ++i) { thisDist = euclideanDistance(vector, candidates[i]); if (thisDist < minDist) { minDist = thisDist; nearest = i; } } return nearest; } /** * Returns the normalized version of a vector, i.e. same direction, * unit length. * @param vec Vector whose normalized version is requested. */ public static float[] getNormalizedVector(float[] vec){ float norm = 0; int i; float[] tmpVec = new float[vec.length]; for (i = 0; i < vec.length; ++i) { tmpVec[i] = vec[i]; } for (i = 0; i < tmpVec.length; ++i) { norm += tmpVec[i]*tmpVec[i]; } norm = (float)Math.sqrt(norm); for (i = 0; i < tmpVec.length; ++i) { tmpVec[i] = tmpVec[i]/norm; } return tmpVec; } /** * Returns the normalized version of a 2 tensor, i.e. an array of * arrays of floats. */ public static float[][] getNormalizedTensor(float[][] tensor){ int dim = tensor[0].length; float[][] normedTensor = new float[dim][dim]; float norm = (float)Math.sqrt(getInnerProduct(tensor, tensor)); for (int i = 0; i < dim; ++i) { for (int j = 0; j < dim; ++j) { normedTensor[i][j] = tensor[i][j]/norm; } } return normedTensor; } /** * Returns a 2-tensor which is the outer product of 2 vectors. 
*/ public static float[][] getOuterProduct(float[] vec1, float[] vec2) { int dim = vec1.length; float[][] outProd = new float[dim][dim]; for (int i=0; i<dim; ++i) { for (int j=0; j<dim; ++j) { outProd[i][j] = vec1[i] * vec2[j]; } } return outProd; } /** * Returns the sum of two tensors. */ public static float[][] getTensorSum(float[][] ten1, float[][] ten2) { int dim = ten1[0].length; float[][] result = new float[dim][dim]; for (int i = 0; i < dim; ++i) { for (int j = 0; j < dim; ++j) { result[i][j] += ten1[i][j] + ten2[i][j]; } } return result; } /** * Returns the inner product of two tensors. */ public static float getInnerProduct(float[][] ten1, float[][]ten2){ float result = 0; int dim = ten1[0].length; for (int i = 0; i < dim; ++i) { for (int j = 0; j < dim; ++j) { result += ten1[i][j] * ten2[j][i]; } } return result; } /** * Returns the convolution of two vectors; see Plate, * Holographic Reduced Representation, p. 76. */ public static float[] getConvolutionFromTensor(float[][] tensor){ int dim = tensor.length; float[] conv = new float[2*dim - 1]; for (int i = 0; i < dim; ++i) { conv[i] = 0; conv[conv.length - 1 - i] = 0; for (int j = 0; j <= i; ++j) { // Count each pair of diagonals. // TODO(widdows): There may be transpose conventions to check. conv[i] += tensor[i-j][j]; if (i != dim - 1) { // Avoid counting lead diagonal twice. conv[conv.length - 1 - i] = tensor[dim-1-i+j][dim-1-j]; } } } return VectorUtils.getNormalizedVector(conv); } /** * Returns the convolution of two vectors; see Plate, * Holographic Reduced Representation, p. 76. */ public static float[] getConvolutionFromVectors(float[] vec1, float[] vec2) { int dim = vec1.length; float[] conv = new float[2 * dim - 1]; for (int i = 0; i < dim; ++i) { conv[i] = 0; conv[conv.length - 1 - i] = 0; for (int j = 0; j <= i; ++j) { // Count each pair of diagonals. conv[i] += vec1[i-j] * vec2[j]; if (i != dim - 1) { // Avoid counting lead diagonal twice. conv[conv.length - 1 - i] = vec1[dim-1-i+j] * vec2[dim-1-j]; } } } return VectorUtils.getNormalizedVector(conv); } /** * Sums the scalar products of a vector and each member of a list of * vectors. If the list is orthonormal, this gives the cosine * similarity of the test vector and the subspace generated by the * orthonormal vectors. */ public static float getSumScalarProduct(float[] testVector, ArrayList<float[]> vectors) { float score = 0; for (int i = 0; i < vectors.size(); ++i) { score += scalarProduct(vectors.get(i), testVector); } return score; } /** * The orthogonalize function takes an array of vectors and * orthogonalizes them using the Gram-Schmidt process. The vectors * are orthogonalized in place, so there is no return value. Note * that the output of this function is order dependent, in * particular, the jth vector in the array will be made orthogonal * to all the previous vectors. Since this means that the last * vector is orthogonal to all the others, this can be used as a * negation function to give an vector for * vectors[last] NOT (vectors[0] OR ... OR vectors[last - 1]. * * @param vectors ArrayList of vectors (which are themselves arrays of * floats) to be orthogonalized in place. */ public static boolean orthogonalizeVectors(ArrayList<float[]> vectors) { vectors.set(0, getNormalizedVector(vectors.get(0))); // Go up through vectors in turn, parameterized by k. 
for (int k = 0; k < vectors.size(); ++k) { float[] kthVector = vectors.get(k); if (kthVector.length != ObjectVector.vecLength) { System.err.println("In orthogonalizeVector: not all vectors have required dimension."); return false; } // Go up to vector k, parameterized by j. for (int j = 0; j < k; ++j) { float[] jthVector = vectors.get(j); float dotProduct = scalarProduct(kthVector, jthVector); // Subtract relevant amount from kth vector. for (int i = 0; i < ObjectVector.vecLength; ++i) { kthVector[i] -= dotProduct * jthVector[i]; } } // Normalize the vector we're working on. vectors.set(k, getNormalizedVector(kthVector)); } return true; } /** * Generates a basic sparse vector (dimension = ObjectVector.vecLength) * with mainly zeros and some 1 and -1 entries (seedLength/2 of each) * each vector is an array of length seedLength containing 1+ the index of a non-zero * value, signed according to whether this is a + or -1. * <br> * e.g. +20 would indicate a +1 in position 19, +1 would indicate a +1 in position 0. * -20 would indicate a -1 in position 19, -1 would indicate a -1 in position 0. * <br> * The extra offset of +1 is because position 0 would be unsigned, * and would therefore be wasted. Consequently we've chosen to make * the code slightly more complicated to make the implementation * slightly more space efficient. * * @return Sparse representation of basic ternary vector. Array of * short signed integers, indices to the array locations where a * +/-1 entry is located. */ public static short[] generateRandomVector(int seedLength, Random random) { boolean[] randVector = new boolean[ObjectVector.vecLength]; short[] randIndex = new short[seedLength]; int testPlace, entryCount = 0; /* put in +1 entries */ while (entryCount < seedLength / 2) { testPlace = random.nextInt(ObjectVector.vecLength); if (!randVector[testPlace]) { randVector[testPlace] = true; randIndex[entryCount] = new Integer(testPlace + 1).shortValue(); entryCount++; } } /* put in -1 entries */ while (entryCount < seedLength) { testPlace = random.nextInt (ObjectVector.vecLength); if (!randVector[testPlace]) { randVector[testPlace] = true; randIndex[entryCount] = new Integer((1 + testPlace) * -1).shortValue(); entryCount++; } } return randIndex; } /** * Given an array of floats, return an array of indices to the n largest values. */ public static short[] getNLargestPositions(float[] values, int numResults) { // TODO(dwiddows): Find some apprpriate "CHECK" function to use here. if (numResults > values.length) { System.err.println("Asking for highest " + numResults + " entries out of only " + values.length); System.exit(-1); } LinkedList<Integer> largestPositions = new LinkedList<Integer>(); // Initialize result list if just starting. largestPositions.add(new Integer(0)); float threshold = values[0]; for (int i = 0; i < values.length; ++i) { if (values[i] > threshold || largestPositions.size() < numResults) { boolean added = false; for (int j = 0; j < largestPositions.size(); ++j) { // Add to list if this is right place. if (values[i] > values[largestPositions.get(j).intValue()] && added == false) { largestPositions.add(j, new Integer(i)); added = true; } } // Prune list if there are already numResults. 
if (largestPositions.size() > numResults) { largestPositions.removeLast(); threshold = values[largestPositions.getLast().intValue()]; } else { if (added == false) { largestPositions.add(new Integer(i)); } } } } // CHECK if (largestPositions.size() != numResults) { System.err.println("We have " + largestPositions.size() + " results. Expecting " + numResults); System.exit(-1); } Object[] intArray = largestPositions.toArray(); short[] results = new short[numResults]; for (int i = 0; i < numResults; ++i) { results[i] = ((Integer)intArray[i]).shortValue(); } return results; } /** * Take a vector of floats and simplify by quantizing to a sparse format. Lossy. */ public static short[] floatVectorToSparseVector(float[] floatVector, int seedLength) { // TODO(dwiddows): Find some appropriate "CHECK" function to use here. if (seedLength > floatVector.length) { System.err.println("Asking sparse form of length " + seedLength + " from float vector of length " + floatVector.length); System.exit(-1); } short[] topN = getNLargestPositions(floatVector, seedLength/2); float[] inverseVector = new float[floatVector.length]; for (int i = 0; i < floatVector.length; ++i) { inverseVector[i] = -1 * floatVector[i]; } short[] lowN = getNLargestPositions(inverseVector, seedLength/2); short[] sparseVector = new short[seedLength]; for (int i = 0; i < seedLength/2; ++i) { sparseVector[i] = new Integer(topN[i] + 1).shortValue(); sparseVector[seedLength/2 + i] = new Integer(-1 * (lowN[i] + 1)).shortValue(); } return sparseVector; } /** * Translate sparse format (listing of offsets) into full float vector. * The random vector is in condensed (signed index + 1) * representation, and is converted to a full float vector by adding -1 or +1 to the * location (index - 1) according to the sign of the index. * (The -1 and +1 are necessary because there is no signed * version of 0, so we'd have no way of telling that the * zeroth position in the array should be plus or minus 1.) */ public static float[] sparseVectorToFloatVector(short[] sparseVector, int dimension) { float[] output = new float[dimension]; for (int i = 0; i < dimension; ++i) { output[i] = 0; } for (int i = 0; i < sparseVector.length; ++i) { output[Math.abs(sparseVector[i]) - 1] = Math.signum(sparseVector[i]); } return output; } /** * This method implements rotation as a form of vector permutation, * as described in Sahlgren, Holst and Kanervi 2008. This supports * encoding of N-grams, as rotating random vectors serves as a convenient * alternative to random permutation * @param indexVector the sparse vector to be permuted * @param rotation the direction and number of places to rotate * @return sparse vector with permutation */ public static short[] permuteSparseVector (short[] indexVector, int rotation) {short[] permutedVector = new short[indexVector.length]; for (int x =0; x < permutedVector.length; x++) { int max = ObjectVector.vecLength; int newIndex = Math.abs(indexVector[x]); int sign = Integer.signum(indexVector[x]); //rotate vector newIndex += rotation; if (newIndex > max) newIndex = newIndex - max; if (newIndex < 1) newIndex = max + newIndex; newIndex = newIndex * sign; permutedVector[x] = (short) newIndex; } return permutedVector; } /** * This method implements rotation as a form of vector permutation, * as described in Sahlgren, Holst and Kanervi 2008. 
This supports * encoding of N-grams, as rotating random vectors serves as a convenient * alternative to random permutation * @param indexVector the sparse vector to be permuted * @param rotation the direction and number of places to rotate * @return vector with permutation */ public static float[] permuteVector (float[] indexVector, int rotation) { //correct for unlikely possibility that rotation specified > indexVector.length if (Math.abs(rotation) > indexVector.length) rotation = rotation % indexVector.length; float[] permutedVector = new float[indexVector.length]; for (int x =0; x < permutedVector.length; x++) { int max = indexVector.length; int newIndex = x - rotation; if (newIndex >= max) newIndex = newIndex - max; if (newIndex < 0) newIndex = max + newIndex; permutedVector[x] = indexVector[newIndex]; } return permutedVector; } }
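// Illustrative usage sketch (not part of the original sources): exercises a few of the
// VectorUtils routines above. A dense vector is quantized to the sparse signed-index form,
// expanded back to a dense vector, and compared with the original via the scalar product;
// the dimension and values are arbitrary.
package pitt.search.semanticvectors;

public class VectorUtilsUsageExample {

    public static void main(String[] args) {
        float[] dense = {0.9f, -0.1f, 0.4f, -0.8f, 0.05f, 0.7f, -0.6f, 0.2f};

        // Keep the 2 largest and 2 smallest entries (seedLength = 4) as +/-1 positions
        short[] sparse = VectorUtils.floatVectorToSparseVector(dense, 4);
        float[] reconstructed = VectorUtils.sparseVectorToFloatVector(sparse, dense.length);
        VectorUtils.printVector(reconstructed);

        // Cosine-style similarity between the normalized original and its sparse approximation
        float similarity = VectorUtils.scalarProduct(
                VectorUtils.getNormalizedVector(dense),
                VectorUtils.getNormalizedVector(reconstructed));
        System.out.println("similarity = " + similarity);

        // Rotation-based permutation, e.g. for encoding term order
        float[] shifted = VectorUtils.permuteVector(dense, 1);
        VectorUtils.printVector(shifted);
    }
}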
package luggage.controllers; import java.net.URL; import java.util.List; import java.util.ResourceBundle; import javafx.application.Platform; import javafx.collections.FXCollections; import javafx.collections.ObservableList; import javafx.fxml.FXML; import javafx.fxml.Initializable; import javafx.scene.control.Button; import javafx.scene.control.ChoiceBox; import javafx.scene.control.TableColumn; import javafx.scene.control.TableView; import javafx.scene.control.TextField; import javafx.scene.control.cell.PropertyValueFactory; import javafx.stage.Stage; import luggage.Debug; import luggage.MainActivity; import luggage.database.models.CustomerModel; import luggage.database.models.InsurerModel; import luggage.database.models.LocationModel; import luggage.database.models.UserModel; import luggage.database.models.Model; import luggage.helpers.StageHelper; import luggage.security.Encryption; import org.controlsfx.control.action.Action; import org.controlsfx.dialog.Dialog; import org.controlsfx.dialog.Dialogs; /** * UsersController * * Controller for users/list.fxml * * @package luggage.controllers * @author Alexander + Nick */ public class UsersController extends BaseController implements Initializable { @FXML private TableView listTableView; @FXML private TableColumn listTableViewUsername; @FXML private TableColumn listTableViewName; @FXML private TableColumn listTableViewInsurer; @FXML private TableColumn listTableViewAddress; @FXML private TableColumn listTableViewPhone; @FXML private TableColumn listTableViewEmail; @FXML private TableColumn listTableViewRole; @FXML private TextField listSearchField; @FXML private Button newAdd; @FXML private Button newReset; @FXML private Button newCancel; /* *all ADD fields */ @FXML private TextField addAddress; @FXML private TextField addPostalcode; @FXML private TextField addResidence; @FXML private ChoiceBox<LocationModel> addWorkplace; @FXML private ChoiceBox addRole; @FXML private TextField addTelephone; @FXML private TextField addMobile; @FXML private TextField addFirstname; @FXML private TextField addPrefix; @FXML private TextField addLastname; @FXML private ChoiceBox addGender; @FXML private TextField addUsername; @FXML private TextField addPassword; @FXML private TextField addPasswordRepeat; /* * all EDIT fields */ @FXML private Button editAdd; @FXML private TextField editFirstname; @FXML private TextField editPrefix; @FXML private TextField editLastname; @FXML private TextField editUsername; @FXML private TextField editPassword; @FXML private TextField editPasswordRepeat; @FXML private TextField editAddress; @FXML private TextField editPostalcode; @FXML private TextField editResidence; @FXML private TextField editTelephone; @FXML private TextField editMobile; @FXML private ChoiceBox editGender; @FXML private ChoiceBox editRole; @FXML private ChoiceBox<LocationModel> editWorkplace; @FXML private Button editReset; @FXML private Button editSave; @FXML private Button editCancel; /** * VIEW ELEMENTS */ @FXML private Button listView; @FXML private Button listEdit; @FXML private Button listRemove; @FXML private TextField viewUsername; @FXML private TextField viewPassword; @FXML private TextField viewFirstname; @FXML private TextField viewPrefix; @FXML private TextField viewLastname; @FXML private TextField viewAddress; @FXML private TextField viewPostalcode; @FXML private TextField viewResidence; @FXML private TextField viewTelephone; @FXML private TextField viewMobile; @FXML private ChoiceBox viewRole; @FXML private ChoiceBox viewGender; @FXML private 
ChoiceBox viewWorkplace; @FXML private Button viewClose; private ObservableList<UserModel> listData = FXCollections.observableArrayList(); private final ObservableList<LocationModel> workplaceData = FXCollections.observableArrayList(); @FXML public void listOnSearch() { String[] keywords = listSearchField.getText().split("\\s+"); String[] params = new String[3 * keywords.length]; boolean firstColumn = true; String query = ""; for (int i = 0; i < keywords.length; i++) { if (firstColumn) { params[3 * i] = "%" + keywords[i] + "%"; query += "username LIKE ?"; } else { params[3 * i] = "%" + keywords[i] + "%"; query += " OR username LIKE ?"; } params[3 * i + 1] = "%" + keywords[i] + "%"; query += " OR firstname LIKE ?"; params[3 * i + 2] = "%" + keywords[i] + "%"; query += " OR lastname LIKE ?"; firstColumn = false; } listResetTableView(query, params); } private ObservableList<UserModel> data = FXCollections.observableArrayList(); /** * Called on controller start * * @param url * @param rb */ @Override public void initialize(URL url, ResourceBundle rb) { Platform.runLater(new Runnable() { @Override public void run() { Debug.print("USERS CONTROLLER"); // List if (listTableView != null) { listResetTableView("", new String[0]); listEdit.disableProperty().bind(listTableView.getSelectionModel().selectedItemProperty().isNull()); listRemove.disableProperty().bind(listTableView.getSelectionModel().selectedItemProperty().isNull()); listView.disableProperty().bind(listTableView.getSelectionModel().selectedItemProperty().isNull()); listTableView.setColumnResizePolicy(TableView.CONSTRAINED_RESIZE_POLICY); } // Add if (addGender != null && addRole != null && addWorkplace != null) { setAddChoiceBox(); } //Edit if (editGender != null && editRole != null && editWorkplace != null) { setEditFields(); setEditChoiceBoxes(); } // View if (viewGender != null && viewRole != null && viewWorkplace != null) { setViewChoiceBoxes(); setViewFields(); } } }); } public void setViewChoiceBoxes() { viewGender.setItems(FXCollections.observableArrayList( "MALE", "FEMALE", "OTHER" )); viewRole.setItems(FXCollections.observableArrayList( "EMPLOYEE", "MANAGER", "MODERATOR", "SUPER" )); LocationModel locations = new LocationModel(); List<Model> allLocations = locations.findAll("", new String[0]); int locationId = new UserModel(MainActivity.viewId).getLocation().getId(); for(Model allLocation : allLocations) { LocationModel location = (LocationModel) allLocation; if(location.getId() == locationId) { selectedWorkplace = location; } workplaceData.add(location); } viewWorkplace.setItems(workplaceData); } public void setAddChoiceBox() { addGender.setItems(FXCollections.observableArrayList( "MALE", "FEMALE", "OTHER" )); addRole.setItems(FXCollections.observableArrayList( "EMPLOYEE", "MANAGER", "MODERATOR", "SUPER" )); LocationModel locations = new LocationModel(); List<Model> allLocations = locations.findAll("", new String[0]); for(Model allLocation : allLocations) { LocationModel location = (LocationModel) allLocation; workplaceData.add(location); } addWorkplace.setItems(workplaceData); } public void setEditFields() { UserModel user = new UserModel(MainActivity.editId); editUsername.setText(user.getUsername()); //editPassword.setText(user.getPassword()); //editPasswordRepeat.setText(user.getPassword()); editFirstname.setText(user.getFirstname()); editPrefix.setText(user.getPrefix()); editLastname.setText(user.getLastname()); editAddress.setText(user.getAddress()); editPostalcode.setText(user.getPostalcode());
editResidence.setText(user.getResidence()); editTelephone.setText(user.getTelephone()); editMobile.setText(user.getMobile()); editGender.getSelectionModel().select(user.getGender().toUpperCase()); editRole.getSelectionModel().select(user.getRole().toUpperCase()); editWorkplace.getSelectionModel().select(user.getLocation()); } public LocationModel selectedWorkplace; public void setEditChoiceBoxes() { editGender.setItems(FXCollections.observableArrayList( "MALE", "FEMALE", "OTHER" )); editRole.setItems(FXCollections.observableArrayList( "EMPLOYEE", "MANAGER", "MODERATOR", "SUPER" )); LocationModel locations = new LocationModel(); List<Model> allLocations = locations.findAll("", new String[0]); int locationId = new UserModel(MainActivity.editId).getLocation().getId(); for(Model allLocation : allLocations) { LocationModel location = (LocationModel) allLocation; if(location.getId() == locationId) { selectedWorkplace = location; } workplaceData.add(location); } editWorkplace.setItems(workplaceData); } public void listResetTableView(String where, String... params) { UserModel users = new UserModel(); List<Model> allUsers = users.findAll(where, params); data = FXCollections.observableArrayList(); for (Model allUser : allUsers) { UserModel user = (UserModel) allUser; data.add(user); } listTableViewUsername.setCellValueFactory(new PropertyValueFactory("username")); listTableViewName.setCellValueFactory(new PropertyValueFactory("fullname")); // listTableViewName.setCellValueFactory(new PropertyValueFactory("mobile")); listTableViewRole.setCellValueFactory(new PropertyValueFactory("role")); listTableView.setItems(data); } @FXML public void listNew() { StageHelper.addStage("users/Add new user", this, false, true); } @FXML public void listEdit() { UserModel user = (UserModel) listTableView.getSelectionModel().getSelectedItem(); if (user == null) { return; } MainActivity.editId = user.getId(); StageHelper.addStage("users/Edit selected user", this, false, true); } @FXML public void listRemove() { Stage removeStage = (Stage) listTableView.getScene().getWindow(); Action response = Dialogs.create().owner(removeStage) .title("Are you sure you want to delete this item?") //.masthead("Are you sure you want to delete this item? 
2") .message("Are you sure you want to delete this item?") .actions(Dialog.ACTION_OK, Dialog.ACTION_CANCEL) .showWarning(); if (response == Dialog.ACTION_OK) { UserModel user = (UserModel) listTableView.getSelectionModel().getSelectedItem(); if (user == null) { return; } user.delete(); listOnSearch(); } else { return; } } @FXML public void listView() { UserModel user = (UserModel) listTableView.getSelectionModel().getSelectedItem(); if (user == null) { return; } MainActivity.viewId = user.getId(); StageHelper.addStage("users/Detail view", this, false, true); } public void newCancel() { Stage addStage = (Stage) newCancel.getScene().getWindow(); StageHelper.closeStage(addStage); } public void newReset() { addPassword.setText(""); addPasswordRepeat.setText(""); addUsername.setText(""); addFirstname.setText(""); addPrefix.setText(""); addLastname.setText(""); addAddress.setText(""); addPostalcode.setText(""); addResidence.setText(""); addTelephone.setText(""); addMobile.setText(""); addGender.setValue(null); addRole.setValue("EMPLOYEE"); addWorkplace.setValue(null); } public void newSave() { if (addGender.getSelectionModel().getSelectedItem() == null || addRole.getSelectionModel().getSelectedItem() == null || addWorkplace.getSelectionModel().getSelectedItem() == null || addPassword != addPasswordRepeat) { return; } // if (addRole.getSelectionModel().getSelectedItem() == null) { // return; // if (addWorkplace.getSelectionModel().getSelectedItem() == null) { // return; // if (addPassword != addPasswordRepeat) { // return; UserModel users = new UserModel(); users.setPassword(Encryption.hash(addPassword.getText())); users.setUsername(addUsername.getText()); users.setFirstname(addFirstname.getText()); users.setPrefix(addPrefix.getText()); users.setLastname(addLastname.getText()); users.setGender(addGender.getSelectionModel().getSelectedItem().toString()); users.setAddress(addAddress.getText()); users.setPostalcode(addPostalcode.getText()); users.setResidence(addResidence.getText()); users.setLocationId(Integer.toString(addWorkplace.getSelectionModel().getSelectedItem().getId())); users.setRole(addRole.getSelectionModel().getSelectedItem().toString()); users.setTelephone(addTelephone.getText()); users.setMobile(addMobile.getText()); users.save(); UsersController usersController = (UsersController) StageHelper.callbackController; usersController.listOnSearch(); newCancel(); } public void editReset() { editUsername.setText(""); editPassword.setText(""); editPasswordRepeat.setText(""); editFirstname.setText(""); editPrefix.setText(""); editLastname.setText(""); editPostalcode.setText(""); editAddress.setText(""); editResidence.setText(""); editTelephone.setText(""); editMobile.setText(""); editGender.setValue(null); editRole.setValue("EMPLOYEE"); editWorkplace.setValue(null); } public void editSave() { if (editGender.getSelectionModel().getSelectedItem() == null || editRole.getSelectionModel().getSelectedItem() == null || editWorkplace.getSelectionModel().getSelectedItem() == null || editPassword != editPasswordRepeat) { return; } // if (editRole.getSelectionModel().getSelectedItem() == null) { // return; // if (editWorkplace.getSelectionModel().getSelectedItem() == null) { // return; // if (editPassword != editPasswordRepeat) { // return; UserModel user = new UserModel(MainActivity.editId); user.setUsername(editUsername.getText()); user.setPassword(Encryption.hash(editPassword.getText())); user.setFirstname(editFirstname.getText()); user.setPrefix(editPrefix.getText()); 
user.setLastname(editLastname.getText()); user.setPostalcode(editPostalcode.getText()); user.setAddress(editAddress.getText()); user.setResidence(editResidence.getText()); user.setLocationId(Integer.toString(editWorkplace.getSelectionModel().getSelectedItem().getId())); user.setTelephone(editTelephone.getText()); user.setMobile(editMobile.getText()); user.save(); UsersController userController = (UsersController) StageHelper.callbackController; userController.listOnSearch(); editCancel(); } public void setViewFields() { UserModel user = new UserModel(MainActivity.viewId); viewUsername.setText(user.getUsername()); //viewPassword.setText(user.getPassword()); viewFirstname.setText(user.getFirstname()); viewPrefix.setText(user.getPrefix()); viewLastname.setText(user.getLastname()); viewAddress.setText(user.getAddress()); viewPostalcode.setText(user.getPostalcode()); viewResidence.setText(user.getResidence()); viewTelephone.setText(user.getTelephone()); viewMobile.setText(user.getMobile()); viewGender.getSelectionModel().select(user.getGender().toUpperCase()); viewRole.getSelectionModel().select(user.getRole().toUpperCase()); viewWorkplace.getSelectionModel().select(user.getLocation()); } public void viewClose() { Stage cancelStage = (Stage) viewClose.getScene().getWindow(); StageHelper.closeStage(cancelStage); } public void editCancel() { Stage cancelStage = (Stage) editCancel.getScene().getWindow(); StageHelper.closeStage(cancelStage); } }
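// Illustrative usage sketch (not part of the application): a dependency-free demonstration of
// the keyword search pattern used in listOnSearch() above -- three LIKE placeholders per
// keyword, with the parameter array filled at matching offsets. The column names mirror the
// controller; the sample input is made up.
package luggage.controllers;

public class UserSearchQueryExample {

    public static void main(String[] args) {
        String[] keywords = "jan jansen".split("\\s+");
        String[] params = new String[3 * keywords.length];
        StringBuilder query = new StringBuilder();

        for (int i = 0; i < keywords.length; i++) {
            if (i > 0) {
                query.append(" OR ");
            }
            query.append("username LIKE ? OR firstname LIKE ? OR lastname LIKE ?");
            String like = "%" + keywords[i] + "%";
            // One parameter per placeholder, three placeholders per keyword
            params[3 * i] = like;
            params[3 * i + 1] = like;
            params[3 * i + 2] = like;
        }

        System.out.println("WHERE " + query);
        for (String p : params) {
            System.out.println("param: " + p);
        }
    }
}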
package gngs.coverage; import htsjdk.samtools.SAMRecord; public class ReadRange { public ReadRange(SAMRecord r) { this.referenceIndex = r.getReferenceIndex(); this.alignmentStart = r.getAlignmentStart(); int alignmentEnd = r.getAlignmentEnd(); final int mateStart = r.getMateAlignmentStart(); // Avoid double counting if two reads from the same fragment will overlap if(r.getFirstOfPairFlag() && mateStart >= this.alignmentStart && mateStart <= alignmentEnd) { alignmentEnd = mateStart; } this.referenceName = r.getReferenceName(); this.alignmentEnd = alignmentEnd; } final public int referenceIndex; final public int alignmentStart; final public int alignmentEnd; final public String referenceName; public String toString() { return String.format("%s:%d-%d", referenceName, alignmentStart, alignmentEnd); } }
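// Illustrative usage sketch (not part of the original sources): collects fragment-aware read
// coordinates from a BAM file using the ReadRange class above. The file name is a placeholder;
// unmapped and unpaired reads are skipped because the constructor queries mate and pairing flags.
package gngs.coverage;

import java.io.File;
import java.util.ArrayList;
import java.util.List;

import htsjdk.samtools.SAMRecord;
import htsjdk.samtools.SamReader;
import htsjdk.samtools.SamReaderFactory;

public class ReadRangeUsageExample {

    public static void main(String[] args) throws Exception {
        List<ReadRange> ranges = new ArrayList<>();
        try (SamReader reader = SamReaderFactory.makeDefault().open(new File("example.bam"))) {
            for (SAMRecord record : reader) {
                // ReadRange assumes pairing information is present and meaningful
                if (record.getReadUnmappedFlag() || !record.getReadPairedFlag()) {
                    continue;
                }
                ranges.add(new ReadRange(record));
            }
        }
        ranges.forEach(r -> System.out.println(r));
    }
}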
package SW9.model_canvas; import SW9.Keybind; import SW9.KeyboardTracker; import SW9.MouseTracker; import SW9.utility.DropShadowHelper; import javafx.animation.Animation; import javafx.animation.Transition; import javafx.event.EventHandler; import javafx.scene.input.KeyCode; import javafx.scene.input.KeyCodeCombination; import javafx.scene.input.MouseEvent; import javafx.scene.layout.Pane; import javafx.scene.shape.Circle; import javafx.util.Duration; public class Location extends Circle { private final static double RADIUS = 25.0f; private boolean isOnMouse = true; public final MouseTracker localMouseTracker = new MouseTracker(); public Location(MouseTracker parentMouseTracker) { this(parentMouseTracker.getX(), parentMouseTracker.getY(), parentMouseTracker); } public Location(final double centerX, final double centerY, final MouseTracker parentMouseTracker) { super(centerX, centerY, RADIUS); // Initialize the local mouse tracker this.setOnMouseMoved(localMouseTracker.onMouseMovedEventHandler); this.setOnMouseClicked(localMouseTracker.onMouseClickedEventHandler); // Add style this.getStyleClass().add("location"); // Update the position of the new location when the mouse moved final EventHandler<MouseEvent> followMouseHandler = mouseMovedEvent -> { Location.this.setCenterX(mouseMovedEvent.getX()); Location.this.setCenterY(mouseMovedEvent.getY()); }; // Place the new location when the mouse is pressed (i.e. stop moving it) final EventHandler<MouseEvent> locationMouseClick = mouseClickedEvent -> { if (isOnMouse) { parentMouseTracker.unregisterOnMouseMovedEventHandler(followMouseHandler); // Tell the canvas that the mouse is no longer occupied ModelCanvas.locationOnMouse = null; KeyboardTracker.unregisterKeybind(removeOnEscape); Animation locationPlaceAnimation = new Transition() { { setCycleDuration(Duration.millis(50)); } protected void interpolate(double frac) { Location.this.setEffect(DropShadowHelper.generateElevationShadow(12 - 12 * frac)); } }; locationPlaceAnimation.play(); locationPlaceAnimation.setOnFinished(event -> { isOnMouse = false; }); } else if (mouseClickedEvent.isShiftDown() && !ModelCanvas.mouseHasEdge()) { final Edge edge = new Edge(this, parentMouseTracker); // Type cast the parent to be the anchor pane and disregard the safety and simple add the edge ((Pane) this.getParent()).getChildren().add(edge); // Notify the canvas that we are creating an edge ModelCanvas.edgeOnMouse = edge; } else if (ModelCanvas.mouseHasEdge()) { ModelCanvas.edgeOnMouse.setTargetLocation(this); ModelCanvas.edgeOnMouse = null; } }; // Register the handler for placing the location localMouseTracker.registerOnMouseClickedEventHandler(locationMouseClick); // Register the handler for dragging of the location (is unregistered when clicked) parentMouseTracker.registerOnMouseMovedEventHandler(followMouseHandler); KeyboardTracker.registerKeybind(removeOnEscape); } private final Keybind removeOnEscape = new Keybind(new KeyCodeCombination(KeyCode.ESCAPE), () -> { Pane parent = (Pane) this.getParent(); if(parent == null) return; parent.getChildren().remove(this); // Notify the canvas that we not longer are creating an edge ModelCanvas.locationOnMouse = null; }); }
package bt.net; import bt.BtException; import bt.service.IConfigurationService; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; import java.net.SocketAddress; import java.nio.channels.ServerSocketChannel; import java.nio.channels.SocketChannel; import java.util.HashSet; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.ThreadFactory; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.locks.ReentrantLock; import java.util.concurrent.locks.ReentrantReadWriteLock; import java.util.function.Consumer; public class PeerConnectionPool { private static final Logger LOGGER = LoggerFactory.getLogger(PeerConnectionPool.class); private PeerConnectionFactory connectionFactory; private ExecutorService incomingAcceptor; private HandshakeHandler incomingHandshakeHandler; private ExecutorService executor; private Set<Peer> pendingConnections; private ConcurrentMap<Peer, PeerConnection> connections; private Set<Consumer<PeerConnection>> connectionListeners; private ReentrantReadWriteLock listenerLock; private ReentrantLock connectionLock; public PeerConnectionPool(PeerConnectionFactory connectionFactory, SocketChannelFactory socketChannelFactory, HandshakeHandler incomingHandshakeHandler, IConfigurationService configurationService) { this.connectionFactory = connectionFactory; this.incomingHandshakeHandler = incomingHandshakeHandler; pendingConnections = ConcurrentHashMap.newKeySet(); connections = new ConcurrentHashMap<>(); connectionListeners = new HashSet<>(); listenerLock = new ReentrantReadWriteLock(); connectionLock = new ReentrantLock(); this.incomingAcceptor = Executors.newSingleThreadExecutor( runnable -> new Thread(runnable, "PeerConnectionPool-IncomingAcceptor")); incomingAcceptor.execute(new IncomingAcceptor(socketChannelFactory)); executor = Executors.newCachedThreadPool( new ThreadFactory() { private AtomicInteger threadCount = new AtomicInteger(1); @Override public Thread newThread(Runnable r) { Thread t = new Thread(r, "PeerConnectionPool-Worker[" + threadCount.getAndIncrement() + "]"); t.setDaemon(true); return t; } } ); } public void addConnectionListener(Consumer<PeerConnection> listener) { listenerLock.writeLock().lock(); try { connectionListeners.add(listener); } finally { listenerLock.writeLock().unlock(); } } public void removeConnectionListener(Consumer<PeerConnection> listener) { listenerLock.writeLock().lock(); try { connectionListeners.remove(listener); } finally { listenerLock.writeLock().unlock(); } } public PeerConnection requestConnection(Peer peer, HandshakeHandler handshakeHandler) { PeerConnection existingConnection = connections.get(peer); if (existingConnection == null && !pendingConnections.contains(peer)) { connectionLock.lock(); try { existingConnection = connections.get(peer); if (existingConnection == null && !pendingConnections.contains(peer)) { pendingConnections.add(peer); executor.execute(() -> { try { PeerConnection newConnection = connectionFactory.createConnection(peer); initConnection(newConnection, handshakeHandler, false); } catch (IOException e) { LOGGER.error("Failed to create new outgoing connection for peer: " + peer, e); } finally { connectionLock.lock(); try { pendingConnections.remove(peer); } finally { connectionLock.unlock(); } } }); } } finally { connectionLock.unlock(); } } return existingConnection; } private 
class IncomingAcceptor implements Runnable { private ServerSocketChannel serverChannel; private SocketAddress localAddress; IncomingAcceptor(SocketChannelFactory socketChannelFactory) { try { serverChannel = socketChannelFactory.getIncomingChannel(); this.localAddress = serverChannel.getLocalAddress(); LOGGER.info("Opened server channel for incoming connections @ " + localAddress); } catch (IOException e) { throw new BtException("Failed to create incoming connections listener " + "-- unexpected I/O exception happened when creating an incoming channel", e); } } @Override public void run() { try { while (true) { acceptIncomingConnection(serverChannel.accept()); } } catch (IOException e) { LOGGER.error("Unexpected I/O error when listening to the incoming channel: " + localAddress, e); try { serverChannel.close(); } catch (IOException e1) { LOGGER.warn("Failed to close the incoming channel", e1); } } } } private void acceptIncomingConnection(SocketChannel incomingChannel) { executor.execute(() -> { try { PeerConnection incomingConnection = connectionFactory.createConnection(incomingChannel); initConnection(incomingConnection, incomingHandshakeHandler, true); } catch (IOException e) { LOGGER.error("Failed to process incoming connection", e); } }); } private boolean initConnection(PeerConnection newConnection, HandshakeHandler handshakeHandler, boolean shouldNotifyListeners) { boolean success = handshakeHandler.handleConnection(newConnection); if (success) { addConnection(newConnection, shouldNotifyListeners); if (LOGGER.isDebugEnabled()) { LOGGER.debug("Successfully performed handshake for connection, remote peer: " + newConnection.getRemotePeer() + "; handshake handler: " + handshakeHandler.getClass().getName()); } } else { if (LOGGER.isDebugEnabled()) { LOGGER.debug("Failed to perform handshake for connection, remote peer: " + newConnection.getRemotePeer() + "; handshake handler: " + handshakeHandler.getClass().getName()); } newConnection.closeQuietly(); } return success; } private void addConnection(PeerConnection newConnection, boolean shouldNotifyListeners) { PeerConnection existingConnection = connections.putIfAbsent(newConnection.getRemotePeer(), newConnection); if (existingConnection != null) { if (LOGGER.isDebugEnabled()) { LOGGER.debug("Connection already exists for peer: " + newConnection.getRemotePeer()); } newConnection.closeQuietly(); newConnection = existingConnection; } if (shouldNotifyListeners) { listenerLock.readLock().lock(); try { for (Consumer<PeerConnection> listener : connectionListeners) { listener.accept(newConnection); } } finally { listenerLock.readLock().unlock(); } } } }
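A hedged usage sketch of the pool API defined above (addConnectionListener and requestConnection). The factory, handler, and peer objects are stand-ins: their constructors are not shown in this file, so obtaining them is assumed to be handled by the surrounding bt codebase.

package bt.net;

import bt.service.IConfigurationService;

// Illustrative wiring only: every dependency passed to the pool here is assumed
// to be provided elsewhere in the bt codebase.
public class PeerConnectionPoolUsageSketch {

    public static void connectTo(Peer somePeer,
                                 PeerConnectionFactory connectionFactory,
                                 SocketChannelFactory socketChannelFactory,
                                 HandshakeHandler incomingHandshakeHandler,
                                 HandshakeHandler outgoingHandshakeHandler,
                                 IConfigurationService configurationService) {

        PeerConnectionPool pool = new PeerConnectionPool(
                connectionFactory, socketChannelFactory, incomingHandshakeHandler, configurationService);

        // Listeners are notified for incoming connections only (shouldNotifyListeners
        // is true in the incoming path of initConnection/addConnection above).
        pool.addConnectionListener(connection ->
                System.out.println("New incoming connection from " + connection.getRemotePeer()));

        // requestConnection is asynchronous for a new peer: the attempt runs on the
        // worker pool, so the first call returns null and later calls return the live
        // connection once the handshake has succeeded.
        PeerConnection connection = pool.requestConnection(somePeer, outgoingHandshakeHandler);
        if (connection == null) {
            // Connection is still being established in the background (or the handshake failed).
        }
    }
}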
/** * Various useful statistics utilities * * @author Jaroslav Fowkes */ package codemining.util; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Map.Entry; import com.google.common.base.Optional; import com.google.common.math.DoubleMath; public final class StatsUtil { /** * Return the element with the maximum value in the map. * * @param valuedObjects * @return */ public static <T> Optional<T> argmax(final Map<T, Double> valuedObjects) { double max = Double.NEGATIVE_INFINITY; T maxElement = null; for (final Entry<T, Double> entry : valuedObjects.entrySet()) { if (max < entry.getValue()) { max = entry.getValue(); maxElement = entry.getKey(); } } if (maxElement != null) { return Optional.of(maxElement); } else { return Optional.absent(); } } /** * Code ported from LingPipe. This method returns the base-2 log of the sum of the * base-2 exponentiated values in the specified collection. Mathematically, the * result is * * <blockquote> * * <pre> * log2SumOfExponentials(xs) = log2 <big><big>( &Sigma;</big></big><sub>i</sub> 2^xs[i] <big><big>)</big></big> * </pre> * * </blockquote> * * But the result is not calculated directly. Instead, the calculation * performed is: * * <blockquote> * * <pre> * log2SumOfExponentials(xs) = max(xs) + log2 <big><big>( &Sigma;</big></big><sub>i</sub> 2^(xs[i] - max(xs)) <big><big>)</big></big> * </pre> * * </blockquote> * * which produces the same result, but is much more arithmetically stable, * because the largest exponent for which <code>2^x</code> is calculated is * 0.0. * * <p> * Values of {@code Double.NEGATIVE_INFINITY} are treated as having * exponentials of 0 and logs of negative infinity. That is, they are * ignored for the purposes of this computation. * * @param values * Collection of base-2 log values. * @return The base-2 log of the sum of the base-2 exponentiated values in the collection. */ public static double log2SumOfExponentials(final Collection<Double> values) { if (values.size() == 1) { return values.iterator().next(); } final double max = max(values); double sum = 0.0; for (final double value : values) { if (value != Double.NEGATIVE_INFINITY) { sum += Math.pow(2, value - max); } } return max + DoubleMath.log2(sum); } public static double log2SumOfExponentials(final double log2Prob1, final double log2Prob2) { final double max; final double min; if (log2Prob1 > log2Prob2) { max = log2Prob1; min = log2Prob2; } else { max = log2Prob2; min = log2Prob1; } final double diff = min - max; if (diff < -54) { // 1. + Math.pow(2, diff) would return 1 and thus we avoid the // computation return max; } else { return max + (Math.log1p(Math.pow(2, diff)) / LN_2); } } /** * Calculates the max of an Array */ public static double max(final double...
array) { double max = Double.NEGATIVE_INFINITY; for (final double value : array) { if (max < value) { max = value; } } return max; } /** * Retrieve the max element * * @param values * @return */ public static double max(final Iterable<Double> values) { double max = Double.NEGATIVE_INFINITY; for (final double value : values) { if (max < value) { max = value; } } return max; } /** * Calculates the mean of a Collection */ public static double mean(final Collection<Double> values) { return sum(values) / values.size(); } /** * Calculates the median of a List. Note that the given list is sorted in place. */ public static double median(final List<Double> values) { Collections.sort(values); final int middle = values.size() / 2; if (values.size() % 2 == 1) { return values.get(middle); } else { return (values.get(middle - 1) + values.get(middle)) / 2.0; } } /** * Retrieve the min element * * @param xs * @return */ public static double min(final Collection<Double> xs) { double min = Double.POSITIVE_INFINITY; for (final double value : xs) { if (min > value) { min = value; } } return min; } /** * Calculates the min of an Array */ public static double min(final double... array) { double min = Double.POSITIVE_INFINITY; for (final double value : array) { if (min > value) { min = value; } } return min; } /** * Calculates the mode of a Collection */ public static double mode(final Collection<Double> values) { double maxValue = 0; int maxCount = 0; for (final Double elementA : values) { int count = 0; for (final Double elementB : values) { if (elementB.equals(elementA)) { ++count; } } if (count > maxCount) { maxCount = count; maxValue = elementA; } } return maxValue; } /** * Calculates the norm of an Array */ public static double norm(final double... array) { double norm = 0; for (final double element : array) { norm += element * element; } return Math.sqrt(norm); } /** * Normalize the given probabilities in place. * * @param log2prob the map of base-2 log probabilities; its values are replaced by normalized probabilities */ public static <T> void normalizeLog2Probs(final Map<T, Double> log2prob) { final double sum = log2SumOfExponentials(log2prob.values()); for (final Entry<T, Double> entry : log2prob.entrySet()) { entry.setValue(Math.pow(2, entry.getValue() - sum)); } } /** * Calculates the sum of an Array */ public static double sum(final double... array) { double sum = 0; for (final double element : array) { sum += element; } return sum; } /** * Calculates the sum of a Collection */ public static double sum(final Iterable<Double> values) { double sum = 0; for (final Double element : values) { sum += element; } return sum; } private static final double LN_2 = Math.log(2); }
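A small, self-contained example of why the max-shifted form in log2SumOfExponentials matters: summing 2^x directly underflows for large-magnitude log values, while the shifted form stays finite. Only standard Java and the StatsUtil methods defined above are used; the demo class name is illustrative.

package codemining.util;

import java.util.Arrays;
import java.util.List;

// Demonstrates the numerical-stability argument from the javadoc above.
public class LogSumExpDemo {

    public static void main(String[] args) {
        // Base-2 log probabilities far too small for a direct 2^x sum.
        final List<Double> log2Values = Arrays.asList(-1100.0, -1101.0, -1102.0);

        // Naive computation: 2^-1100 underflows to 0, so the log of the sum is -Infinity.
        double naiveSum = 0.0;
        for (final double v : log2Values) {
            naiveSum += Math.pow(2, v);
        }
        System.out.println("naive  log2(sum) = " + (Math.log(naiveSum) / Math.log(2))); // -Infinity

        // Stable computation via the max-shifted form implemented in StatsUtil.
        System.out.println("stable log2(sum) = " + StatsUtil.log2SumOfExponentials(log2Values)); // ~ -1099.19
    }
}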
package com.crawljax.core; import java.util.ArrayList; import java.util.List; import net.jcip.annotations.GuardedBy; import org.apache.log4j.Logger; import com.crawljax.browser.EmbeddedBrowser; import com.crawljax.core.configuration.CrawljaxConfigurationReader; import com.crawljax.core.plugin.CrawljaxPluginsUtil; import com.crawljax.core.state.Eventable; import com.crawljax.core.state.Identification; import com.crawljax.core.state.StateFlowGraph; import com.crawljax.core.state.StateMachine; import com.crawljax.core.state.StateVertix; import com.crawljax.core.state.Eventable.EventType; import com.crawljax.forms.FormHandler; import com.crawljax.forms.FormInput; import com.crawljax.util.ElementResolver; /** * Class that performs crawl actions. It is designed to be run inside a Thread * * @see #run() * @author dannyroest@gmail.com (Danny Roest) * @author Stefan Lenselink <S.R.Lenselink@student.tudelft.nl> * @version $Id$ */ public class Crawler implements Runnable { private static final Logger LOGGER = Logger.getLogger(Crawler.class.getName()); private static final int ONE_SECOND = 1000; /** * The main browser window 1 to 1 relation; Every Thread will get on browser assigned in the run * function. */ private EmbeddedBrowser browser; /** * The central DataController. This is a multiple to 1 relation Every Thread shares an instance * of the same controller! All operations / fields used in the controller should be checked for * Thread safety */ private final CrawljaxController controller; /** * Depth register. */ private int depth = 0; /** * The path followed from the index to the current state. */ private List<Eventable> exactEventPath = new ArrayList<Eventable>(); /** * TODO Stefan why is there two times the same variable? What is the difference and could it be * merged? The path followed from the index to the current state. */ private final List<Eventable> crawlPath = new ArrayList<Eventable>(); /** * The utility which is used to extract the candidate clickables. */ private CandidateElementExtractor candidateExtractor; private boolean fired = false; /** * The name of this Crawler when not default (automatic) this will be added to the Thread name * in the {@link CrawlThreadFactory} as (name). In the * {@link CrawlThreadFactory#newThread(Runnable)} the name is retrieved using the * {@link #toString()} function. * * @see Crawler#toString() * @see CrawlThreadFactory#newThread(Runnable) */ private String name = ""; /** * The sateMachine for this Crawler, keeping track of the path crawled by this Crawler. TODO * Stefan its better to have this final... */ private StateMachine stateMachine; private final CrawljaxConfigurationReader configurationReader; private FormHandler formHandler; /** * Enum for describing what has happened after a {@link Crawler#clickTag(Eventable, boolean)} * has been performed. * * @see Crawler#clickTag(Eventable, boolean) */ private enum ClickResult { cloneDetected, newState, domUnChanged } /** * Crawler constructor for a new 'starting from scratch(index)' crawler. * * @param mother * the main CrawljaxController */ public Crawler(CrawljaxController mother) { this(mother, new ArrayList<Eventable>()); if (this.browser == null) { /** * The Crawler is created with only a controller so probably its requested from the * CrawljaxController Create a new Browser to prevent null pointers :). Creating a * browser here would result in NOT loading the initial page in the run operation! This * MUST be done by hand! 
*/ try { browser = mother.getBrowserFactory().requestBrowser(); } catch (InterruptedException e) { LOGGER.error("The request for a browser was interuped", e); } } /** * Reset the state machine to null, dropping the existing state machine, as this call is * from the CrawljaxController the initial State is not known yet and causes trouble. The * CrawljaxController must create & set the first stateMachine using the setStateMachine * which on his case checks is the stateMachine is not set for safety reasons. */ stateMachine = null; } /** * @param mother * the main CrawljaxController * @param exactEventPath * the event path up till this moment. * @param name * a name for this crawler (default is empty). */ public Crawler(CrawljaxController mother, List<Eventable> exactEventPath, String name) { this(mother, exactEventPath); this.name = name; } /** * Private Crawler constructor for a 'reload' crawler. only used from internal * * @param mother * the main CrawljaxController * @param returnPath * the path used to return to the last state, this can be a empty list */ private Crawler(CrawljaxController mother, List<Eventable> returnPath) { this.exactEventPath = returnPath; this.controller = mother; stateMachine = controller.buildNewStateMachine(); this.configurationReader = controller.getConfigurationReader(); } /** * Brings the browser to the initial state. * * @throws CrawljaxException * an exception when the index page can not be loaded */ public void goToInitialURL() throws CrawljaxException { LOGGER.info("Loading Page " + configurationReader.getCrawlSpecificationReader().getSiteUrl()); browser.goToUrl(configurationReader.getCrawlSpecificationReader().getSiteUrl()); /** * Thread safe */ controller.doBrowserWait(browser); CrawljaxPluginsUtil.runOnUrlLoadPlugins(browser); } /** * Try to fire a given event on the Browser. TODO This method has been made public for the * CrossBrowserTest only. * * @param eventable * the eventable to fire * @return true iff the event is fired */ private boolean fireEvent(Eventable eventable) { try { if (eventable.getIdentification().getHow().equals("xpath") && eventable.getRelatedFrame().equals("")) { /** * The path in the page to the 'clickable' (link, div, span, etc) */ String xpath = eventable.getIdentification().getValue(); /** * The type of event to execute on the 'clickable' like onClick, mouseOver, hover, * etc */ EventType eventType = eventable.getEventType(); /** * Try to find a 'better' / 'quicker' xpath */ String newXPath = new ElementResolver(eventable, browser).resolve(); if (newXPath != null) { if (!xpath.equals(newXPath)) { LOGGER.info("XPath changed from " + xpath + " to " + newXPath + " relatedFrame:" + eventable.getRelatedFrame()); eventable = new Eventable(new Identification(Identification.How.xpath, newXPath), eventType); } } } if (browser.fireEvent(eventable)) { /** * Let the controller execute its specified wait operation on the browser Thread * safe */ controller.doBrowserWait(browser); /** * Close opened windows */ browser.closeOtherWindows(); return true; // A event fired } else { /** * Execute the OnFireEventFailedPlugins with the current crawlPath with the * crawlPath removed 1 state to represent the path TO here. */ CrawljaxPluginsUtil.runOnFireEventFailedPlugins(eventable, crawlPath.subList(0, crawlPath.size() - 1)); return false; // no event fired } } catch (Exception e) { LOGGER.error(e.getMessage(), e); } return false; // As we are here there was a error... so definitely there is no event fired. } /** * Enters the form data. 
First, the related input elements (if any) to the eventable are filled * in and then it tries to fill in the remaining input elements. * * @param eventable * the eventable element. */ private void handleInputElements(Eventable eventable) { List<FormInput> formInputs = eventable.getRelatedFormInputs(); for (FormInput formInput : formHandler.getFormInputs()) { if (!formInputs.contains(formInput)) { formInputs.add(formInput); } } eventable.setRelatedFormInputs(formInputs); formHandler.handleFormElements(formInputs); } /** * Reload the browser following the {@link #exactEventPath} to the given currentEvent. * * @throws CrawljaxException * if the crawler encounters an error. */ private void goBackExact() throws CrawljaxException { /** * Thread safe */ StateVertix curState = controller.getSession().getInitialState(); // remove the currentEvent from the list if (exactEventPath.size() > 0) { for (Eventable clickable : exactEventPath) { if (!controller.getElementChecker().checkCrawlCondition(browser)) { return; } LOGGER.info("Backtracking by executing " + clickable.getEventType() + " on element: " + clickable); stateMachine.changeState(clickable.getTargetStateVertix()); curState = clickable.getTargetStateVertix(); crawlPath.add(clickable); this.handleInputElements(clickable); if (this.fireEvent(clickable)) { // TODO ali, do not increase depth if eventable is from guidedcrawling depth++; /** * Run the onRevisitStateValidator(s) TODO Stefan check for thread safety */ CrawljaxPluginsUtil.runOnRevisitStatePlugins(this.controller.getSession(), curState); } if (!controller.getElementChecker().checkCrawlCondition(browser)) { return; } } } } /** * @param eventable * the element to execute an action on. * @param handleInputElements * if inputs should be handled.. * @return the result of the click operation * @throws CrawljaxException * an exception. */ private ClickResult clickTag(final Eventable eventable, boolean handleInputElements) throws CrawljaxException { // load input element values if (handleInputElements) { this.handleInputElements(eventable); } LOGGER.info("Executing " + eventable.getEventType() + " on element: " + eventable + "; State: " + stateMachine.getCurrentState().getName()); if (this.fireEvent(eventable)) { // String dom = new String(browser.getDom()); StateVertix newState = new StateVertix(browser.getCurrentUrl(), controller.getSession() .getStateFlowGraph().getNewStateName(), browser.getDom(), this.controller.getStripedDom(browser)); if (isDomChanged(stateMachine.getCurrentState(), newState)) { crawlPath.add(eventable); if (stateMachine.update(eventable, newState, this.getBrowser(), this.controller .getSession())) { // Dom changed // No Clone exactEventPath.add(eventable); CrawljaxPluginsUtil.runGuidedCrawlingPlugins(controller, controller .getSession(), getExacteventpath(), this.stateMachine); return ClickResult.newState; } else { // Dom changed; Clone return ClickResult.cloneDetected; } } } // Event not fired or // Dom not changed return ClickResult.domUnChanged; } /** * Return the Exacteventpath. * * @return the exacteventpath */ public final List<Eventable> getExacteventpath() { return exactEventPath; } /** * Have we reached the depth limit? * * @param depth * the current depth. Added as argument so this call can be moved out if desired. 
* @return true if the limit has been reached */ private boolean depthLimitReached(int depth) { if (this.depth >= configurationReader.getCrawlSpecificationReader().getDepth() && configurationReader.getCrawlSpecificationReader().getDepth() != 0) { LOGGER.info("DEPTH " + depth + " reached returning from rec call. Given depth: " + configurationReader.getCrawlSpecificationReader().getDepth()); return true; } else { return false; } } /** * Crawl through the clickables. * * @throws CrawljaxException * if an exception is thrown. */ private boolean crawl() throws CrawljaxException { if (depthLimitReached(depth)) { return true; } if (!checkConstraints()) { return false; } // Store the currentState to be able to 'back-track' later. StateVertix orrigionalState = stateMachine.getCurrentState(); orrigionalState.searchForCandidateElements(candidateExtractor, configurationReader .getTagElements(), configurationReader.getExcludeTagElements(), configurationReader.getCrawlSpecificationReader().getClickOnce()); LOGGER.info("Starting preStateCrawlingPlugins..."); CrawljaxPluginsUtil.runPreStateCrawlingPlugins(controller.getSession(), orrigionalState .getUnprocessedCandidateElements()); boolean handleInputElements = true; for (CandidateCrawlAction action : orrigionalState) { CandidateElement candidateElement = action.getCandidateElement(); EventType eventType = action.getEventType(); if (candidateElement.allConditionsSatisfied(browser)) { ClickResult clickResult = clickTag(new Eventable(candidateElement, eventType), handleInputElements); switch (clickResult) { case cloneDetected: fired = false; // TODO A optimisation could be to check the new state (== clone) to see // if there is unfinished work and continue with that so reload can be // Postponed and 1 reload can be saved. this.controller.getSession().addCrawlPath(crawlPath); if (orrigionalState.hasMoreToExplore()) { controller.addWorkToQueue(new Crawler(this.controller, getCurrentExactPaths(false))); } return true; case newState: fired = true; // Recurse because new state found if (orrigionalState.hasMoreToExplore()) { controller.addWorkToQueue(new Crawler(this.controller, getCurrentExactPaths(true))); } return newStateDetected(orrigionalState); case domUnChanged: // Dom not updated, continue with the next handleInputElements = false; break; default: break; } } else { LOGGER.info("Conditions not satisfied for element: " + candidateElement + "; State: " + stateMachine.getCurrentState().getName()); } } return true; } /** * A new state has been found! * * @param orrigionalState * the current state * @return true if crawling must continue false otherwise. * @throws CrawljaxException */ private boolean newStateDetected(StateVertix orrigionalState) throws CrawljaxException { /** * An event has been fired so we are one level deeper */ depth++; LOGGER.info("RECURSIVE Call crawl; Current DEPTH= " + depth); if (!this.crawl()) { // Crawling has stopped controller.terminate(); return false; } stateMachine.changeState(orrigionalState); return true; } /** * Return the exactEventPath to be used in creating a new Crawler. * * @param removeLastElement * if set to true the last element will not be in the crawlPath. * @return the crawlPath leading to the current state. 
*/ private ArrayList<Eventable> getCurrentExactPaths(boolean removeLastElement) { ArrayList<Eventable> path = new ArrayList<Eventable>(); for (Eventable eventable : this.exactEventPath) { Eventable e = eventable.clone(); path.add(e); // path.add(eventable); } // Remove the last entry because we want to be able to go back // into the original state where the last change (last in list) // was made if (removeLastElement && path.size() > 0) { path.remove(path.size() - 1); } return path; } /** * Initialise the Crawler, retrieve a Browser and go to the initail url when no browser was * present. rewind the state machine and goBack to the state if there is exactEventPath is * specified. */ public void init() { /** * If the browser is null place a request for a browser from the BrowserFactory */ if (this.browser == null) { try { this.browser = controller.getBrowserFactory().requestBrowser(); } catch (InterruptedException e1) { LOGGER.error("The request for a browser was interuped", e1); } LOGGER.info("Reloading page for navigating back since browser is not initialized."); try { this.goToInitialURL(); } catch (Exception e) { LOGGER.error("Could not load the initialURL", e); } } // TODO Stefan ideally this should be placed in the constructor this.formHandler = new FormHandler(browser, configurationReader.getInputSpecification(), configurationReader.getCrawlSpecificationReader().getRandomInputInForms()); this.candidateExtractor = new CandidateElementExtractor(controller.getElementChecker(), this.getBrowser(), formHandler); stateMachine.rewind(); /** * Do we need to go back into a previous state? */ if (exactEventPath.size() > 0) { try { this.goBackExact(); } catch (Exception e) { LOGGER.error("Failed to backtrack", e); } } } /** * Terminate and clean up this Crawler, release the aquired browser. Notice that other Crawlers * might still be active. */ public void shutdown() { controller.getBrowserFactory().freeBrowser(this.browser); } /** * The main function stated by the ExecutorService. Crawlers add themselves to the list by * calling {@link CrawljaxController#addWorkToQueue(Crawler)}. When the ExecutorService finds a * free thread this method is called and when this method ends the Thread is released again and * a new Thread is started * * @see java.util.concurrent.Executors#newFixedThreadPool(int) * @see java.util.concurrent.ExecutorService {@inheritDoc} */ @Override public void run() { /** * Init the Crawler */ this.init(); try { /** * Hand over the main crawling */ this.crawl(); /** * Crawling is done; so the crawlPath of the current branch is known */ // TODO Stefan Delete the fired variable if possible? Or move this is not the correct // location. if (fired) { controller.getSession().addCrawlPath(crawlPath); } } catch (Exception e) { LOGGER.error("Crawl failed!", e); } finally { /** * At last failure or non shutdown the Crawler. */ this.shutdown(); } } /** * Return the browser used in this Crawler Thread. * * @return the browser used in this Crawler Thread */ public final EmbeddedBrowser getBrowser() { return browser; } @Override public String toString() { return this.name; } /** * Set the stateMachine that must be used, be careful! This must only be called during the init * of the CrawljaxController. * * @throws CrawljaxException * will be thrown when the stateMachine is already set! * @param machine * the stateMachine to set. 
*/ public void setStateMachine(final StateMachine machine) throws CrawljaxException { if (stateMachine != null) { throw new CrawljaxException( "The stateMachine is allready specified can not be overwritten!"); } this.stateMachine = machine; } /** * @return the state machine. */ public StateMachine getStateMachine() { return stateMachine; } /** * Test to see if the (new) dom is changed with regards to the old dom. This method is Thread * safe. * * @param stateBefore * the state before the event. * @param stateAfter * the state after the event. * @return true if the state is changed according to the compare method of the oracle. */ private boolean isDomChanged(final StateVertix stateBefore, final StateVertix stateAfter) { boolean isChanged = false; // do not need Oracle Comparators now, because hash of stripped dom is // already calculated // isChanged = !stateComparator.compare(stateBefore.getDom(), // stateAfter.getDom(), browser); isChanged = !stateAfter.equals(stateBefore); if (isChanged) { LOGGER.info("Dom is Changed!"); } else { LOGGER.info("Dom Not Changed!"); } return isChanged; } /** * Checks the state and time constraints. This function is nearly Thread-safe * * @return true if all conditions are met. */ @GuardedBy("stateFlowGraph") private boolean checkConstraints() { long timePassed = System.currentTimeMillis() - controller.getSession().getStartTime(); int maxCrawlTime = configurationReader.getCrawlSpecificationReader().getMaximumRunTime(); if ((maxCrawlTime != 0) && (timePassed > maxCrawlTime * ONE_SECOND)) { /* remove all possible candidates left */ // EXACTEVENTPATH.clear(); TODO Stefan: FIX this! LOGGER.info("Max time " + maxCrawlTime + " seconds passed!"); /* stop crawling */ return false; } StateFlowGraph graph = controller.getSession().getStateFlowGraph(); // TODO Stefan is this needed? int maxNumberOfStates = configurationReader.getCrawlSpecificationReader().getMaxNumberOfStates(); synchronized (graph) { if ((maxNumberOfStates != 0) && (graph.getAllStates().size() >= maxNumberOfStates)) { /* remove all possible candidates left */ // EXACTEVENTPATH.clear(); TODO Stefan: FIX this! LOGGER.info("Max number of states " + maxNumberOfStates + " reached!"); /* stop crawling */ return false; } } /* continue crawling */ return true; } }
package com.crawljax.core; import com.crawljax.browser.EmbeddedBrowser; import com.crawljax.core.configuration.CrawljaxConfigurationReader; import com.crawljax.core.plugin.CrawljaxPluginsUtil; import com.crawljax.core.state.Eventable; import com.crawljax.core.state.Eventable.EventType; import com.crawljax.core.state.Identification; import com.crawljax.core.state.StateFlowGraph; import com.crawljax.core.state.StateMachine; import com.crawljax.core.state.StateVertix; import com.crawljax.forms.FormHandler; import com.crawljax.forms.FormInput; import com.crawljax.util.ElementResolver; import org.apache.log4j.Logger; import java.util.ArrayList; import java.util.List; /** * Class that performs crawl actions. It is designed to be run inside a Thread. * * @see #run() * @author dannyroest@gmail.com (Danny Roest) * @author Stefan Lenselink <S.R.Lenselink@student.tudelft.nl> * @version $Id$ */ public class Crawler implements Runnable { private static final Logger LOGGER = Logger.getLogger(Crawler.class.getName()); private static final int ONE_SECOND = 1000; /** * The main browser window 1 to 1 relation; Every Thread will get on browser assigned in the run * function. */ private EmbeddedBrowser browser; /** * The central DataController. This is a multiple to 1 relation Every Thread shares an instance * of the same controller! All operations / fields used in the controller should be checked for * thread safety. */ private final CrawljaxController controller; /** * Depth register. */ private int depth = 0; /** * The path followed from the index to the current state. */ private final List<Eventable> exactEventPath = new ArrayList<Eventable>(); /** * TODO Stefan why is there two times the same variable? What is the difference and could it be * merged? The path followed from the index to the current state. Danny: From the state-flow * graph one cannot derive which paths are crawled. This is for example required for regression * testing. */ private final List<Eventable> crawlPath = new ArrayList<Eventable>(); /** * The utility which is used to extract the candidate clickables. */ private CandidateElementExtractor candidateExtractor; private boolean fired = false; /** * The name of this Crawler when not default (automatic) this will be added to the Thread name * in the {@link CrawlThreadFactory} as (name). In the * {@link CrawlThreadFactory#newThread(Runnable)} the name is retrieved using the * {@link #toString()} function. * * @see Crawler#toString() * @see CrawlThreadFactory#newThread(Runnable) */ private String name = ""; /** * The sateMachine for this Crawler, keeping track of the path crawled by this Crawler. */ private final StateMachine stateMachine; private final CrawljaxConfigurationReader configurationReader; private FormHandler formHandler; /** * The object to places calls to add new Crawlers or to remove one. */ private final CrawlQueueManager crawlQueueManager; /** * Enum for describing what has happened after a {@link Crawler#clickTag(Eventable, boolean)} * has been performed. * * @see Crawler#clickTag(Eventable, boolean) */ private enum ClickResult { cloneDetected, newState, domUnChanged } /** * @param mother * the main CrawljaxController * @param exactEventPath * the event path up till this moment. * @param name * a name for this crawler (default is empty). */ public Crawler(CrawljaxController mother, List<Eventable> exactEventPath, String name) { this(mother, exactEventPath); this.name = name; } /** * Private Crawler constructor for a 'reload' crawler. Only used internally. 
* * @param mother * the main CrawljaxController * @param returnPath * the path used to return to the last state, this can be a empty list */ protected Crawler(CrawljaxController mother, List<Eventable> returnPath) { this.exactEventPath.addAll(returnPath); this.controller = mother; this.configurationReader = controller.getConfigurationReader(); this.crawlQueueManager = mother.getCrawlQueueManager(); if (controller.getSession() != null) { this.stateMachine = new StateMachine(controller.getSession().getStateFlowGraph(), controller.getSession().getInitialState(), controller.getInvariantList()); } else { /** * Reset the state machine to null, because there is no session where to load the * stateFlowGraph from. */ this.stateMachine = null; } } /** * Brings the browser to the initial state. * * @throws CrawljaxException * an exception when the index page can not be loaded */ public void goToInitialURL() throws CrawljaxException { LOGGER.info( "Loading Page " + configurationReader.getCrawlSpecificationReader().getSiteUrl()); getBrowser().goToUrl(configurationReader.getCrawlSpecificationReader().getSiteUrl()); /** * Thread safe */ controller.doBrowserWait(getBrowser()); CrawljaxPluginsUtil.runOnUrlLoadPlugins(getBrowser()); } /** * Try to fire a given event on the Browser. * * @param eventable * the eventable to fire * @return true iff the event is fired */ private boolean fireEvent(Eventable eventable) { try { // TODO Stefan; FindBugs found this bug, not yet solved // Should be changed with: // eventable.getIdentification().getHow().toString().equals("xpath") if (eventable.getIdentification().getHow().equals("xpath") && eventable.getRelatedFrame().equals("")) { /** * The path in the page to the 'clickable' (link, div, span, etc) */ String xpath = eventable.getIdentification().getValue(); /** * The type of event to execute on the 'clickable' like onClick, mouseOver, hover, * etc */ EventType eventType = eventable.getEventType(); /** * Try to find a 'better' / 'quicker' xpath */ String newXPath = new ElementResolver(eventable, getBrowser()).resolve(); if (newXPath != null) { if (!xpath.equals(newXPath)) { LOGGER.info("XPath changed from " + xpath + " to " + newXPath + " relatedFrame:" + eventable.getRelatedFrame()); eventable = new Eventable( new Identification(Identification.How.xpath, newXPath), eventType); } } } if (getBrowser().fireEvent(eventable)) { /** * Let the controller execute its specified wait operation on the browser thread * safe. */ controller.doBrowserWait(getBrowser()); /** * Close opened windows */ getBrowser().closeOtherWindows(); return true; // A event fired } else { /** * Execute the OnFireEventFailedPlugins with the current crawlPath with the * crawlPath removed 1 state to represent the path TO here. */ int limit = crawlPath.size() - 1; if (limit < 0) { limit = 0; } CrawljaxPluginsUtil.runOnFireEventFailedPlugins( eventable, crawlPath.subList(0, limit)); return false; // no event fired } } catch (Exception e) { LOGGER.error(e.getMessage(), e); } return false; // If we arrive here, there was an error, so no event fired. } /** * Enters the form data. First, the related input elements (if any) to the eventable are filled * in and then it tries to fill in the remaining input elements. * * @param eventable * the eventable element. 
*/ private void handleInputElements(Eventable eventable) { List<FormInput> formInputs = eventable.getRelatedFormInputs(); for (FormInput formInput : formHandler.getFormInputs()) { if (!formInputs.contains(formInput)) { formInputs.add(formInput); } } eventable.setRelatedFormInputs(formInputs); formHandler.handleFormElements(formInputs); } /** * Reload the browser following the {@link #exactEventPath} to the given currentEvent. * * @throws CrawljaxException * if the crawler encounters an error. */ private void goBackExact() throws CrawljaxException { /** * Thread safe */ StateVertix curState = controller.getSession().getInitialState(); for (Eventable clickable : exactEventPath) { if (!controller.getElementChecker().checkCrawlCondition(getBrowser())) { return; } LOGGER.info("Backtracking by executing " + clickable.getEventType() + " on element: " + clickable); this.getStateMachine().changeState(clickable.getTargetStateVertix()); curState = clickable.getTargetStateVertix(); crawlPath.add(clickable); this.handleInputElements(clickable); if (this.fireEvent(clickable)) { // TODO ali, do not increase depth if eventable is from guidedcrawling depth++; /** * Run the onRevisitStateValidator(s) */ CrawljaxPluginsUtil.runOnRevisitStatePlugins( this.controller.getSession(), curState); } if (!controller.getElementChecker().checkCrawlCondition(getBrowser())) { return; } } } /** * @param eventable * the element to execute an action on. * @param handleInputElements * if inputs should be handled.. * @return the result of the click operation * @throws CrawljaxException * an exception. */ private ClickResult clickTag(final Eventable eventable) throws CrawljaxException { // load input element values this.handleInputElements(eventable); LOGGER.info("Executing " + eventable.getEventType() + " on element: " + eventable + "; State: " + this.getStateMachine().getCurrentState().getName()); if (this.fireEvent(eventable)) { StateVertix newState = new StateVertix(getBrowser().getCurrentUrl(), controller.getSession().getStateFlowGraph().getNewStateName(), getBrowser().getDom(), this.controller.getStrippedDom(getBrowser())); if (isDomChanged(this.getStateMachine().getCurrentState(), newState)) { // Dom is changed, so data might need be filled in again crawlPath.add(eventable); // TODO Stefan; Fix this behaviour, this causes trouble + performance... this.controller.getSession().setExactEventPath(getExacteventpath()); if (this.getStateMachine().update( eventable, newState, this.getBrowser(), this.controller.getSession())) { // Dom changed // No Clone exactEventPath.add(eventable); CrawljaxPluginsUtil.runGuidedCrawlingPlugins(controller, controller.getSession(), getExacteventpath(), this.getStateMachine()); return ClickResult.newState; } else { // Dom changed; Clone return ClickResult.cloneDetected; } } } // Event not fired or, Dom not changed return ClickResult.domUnChanged; } /** * Return the Exacteventpath. * * @return the exacteventpath */ public final List<Eventable> getExacteventpath() { return exactEventPath; } /** * Have we reached the depth limit? * * @param depth * the current depth. Added as argument so this call can be moved out if desired. * @return true if the limit has been reached */ private boolean depthLimitReached(int depth) { if (this.depth >= configurationReader.getCrawlSpecificationReader().getDepth() && configurationReader.getCrawlSpecificationReader().getDepth() != 0) { LOGGER.info("DEPTH " + depth + " reached returning from rec call. 
Given depth: " + configurationReader.getCrawlSpecificationReader().getDepth()); return true; } else { return false; } } private void spawnThreads(StateVertix state, boolean removeLastStateFromEventPath) { Crawler c = null; do { if (c != null) { this.crawlQueueManager.addWorkToQueue(c); } c = new Crawler(this.controller, getCurrentExactPaths(removeLastStateFromEventPath)); } while (state.registerCrawler(c)); } private ClickResult crawlAction(CandidateCrawlAction action) throws CrawljaxException { CandidateElement candidateElement = action.getCandidateElement(); EventType eventType = action.getEventType(); StateVertix orrigionalState = this.getStateMachine().getCurrentState(); if (candidateElement.allConditionsSatisfied(getBrowser())) { ClickResult clickResult = clickTag(new Eventable(candidateElement, eventType)); switch (clickResult) { case cloneDetected: fired = false; // We are in the clone state so we continue with the cloned version to search // for work. this.controller.getSession().addCrawlPath(crawlPath); spawnThreads(orrigionalState, false); break; case newState: fired = true; // Recurse because new state found spawnThreads(orrigionalState, true); break; case domUnChanged: // Dom not updated, continue with the next break; default: break; } return clickResult; } else { LOGGER.info("Conditions not satisfied for element: " + candidateElement + "; State: " + this.getStateMachine().getCurrentState().getName()); } return ClickResult.domUnChanged; } /** * Crawl through the clickables. * * @throws CrawljaxException * if an exception is thrown. */ private boolean crawl() throws CrawljaxException { if (depthLimitReached(depth)) { return true; } if (!checkConstraints()) { return false; } // Store the currentState to be able to 'back-track' later. StateVertix orrigionalState = this.getStateMachine().getCurrentState(); if (orrigionalState.searchForCandidateElements(candidateExtractor, configurationReader.getTagElements(), configurationReader.getExcludeTagElements(), configurationReader.getCrawlSpecificationReader().getClickOnce())) { // Only execute the preStateCrawlingPlugins when it's the first time LOGGER.info("Starting preStateCrawlingPlugins..."); CrawljaxPluginsUtil.runPreStateCrawlingPlugins( controller.getSession(), orrigionalState.getUnprocessedCandidateElements()); } CandidateCrawlAction action = orrigionalState.pollCandidateCrawlAction(this, crawlQueueManager); while (action != null) { if (depthLimitReached(depth)) { return true; } if (!checkConstraints()) { return false; } ClickResult result = this.crawlAction(action); orrigionalState.finishedWorking(this, action); switch (result) { case newState: return newStateDetected(orrigionalState); case cloneDetected: return true; default: break; } action = orrigionalState.pollCandidateCrawlAction(this, crawlQueueManager); } return true; } /** * A new state has been found! * * @param orrigionalState * the current state * @return true if crawling must continue false otherwise. * @throws CrawljaxException */ private boolean newStateDetected(StateVertix orrigionalState) throws CrawljaxException { /** * An event has been fired so we are one level deeper */ depth++; LOGGER.info("RECURSIVE Call crawl; Current DEPTH= " + depth); if (!this.crawl()) { // Crawling has stopped controller.terminate(); return false; } this.getStateMachine().changeState(orrigionalState); return true; } /** * Return the exactEventPath to be used in creating a new Crawler. * * @param removeLastElement * if set to true the last element will not be in the crawlPath. 
* @return the crawlPath leading to the current state. */ private ArrayList<Eventable> getCurrentExactPaths(boolean removeLastElement) { ArrayList<Eventable> path = new ArrayList<Eventable>(); for (Eventable eventable : this.exactEventPath) { Eventable e = eventable.clone(); path.add(e); // path.add(eventable); } // Remove the last entry because we want to be able to go back // into the original state where the last change (last in list) // was made if (removeLastElement && path.size() > 0) { path.remove(path.size() - 1); } return path; } /** * Initialize the Crawler, retrieve a Browser and go to the initial URL when no browser was * present. rewind the state machine and goBack to the state if there is exactEventPath is * specified. */ public void init() { this.browser = this.getBrowser(); if (this.browser == null) { /** * As the browser is null, request one and got to the initial URL, if the browser is * Already set the browser will be in the initial URL. */ try { this.browser = controller.getBrowserPool().requestBrowser(); } catch (InterruptedException e1) { LOGGER.error("The request for a browser was interuped", e1); } LOGGER.info("Reloading page for navigating back"); try { this.goToInitialURL(); } catch (Exception e) { LOGGER.error("Could not load the initialURL", e); } } // TODO Stefan ideally this should be placed in the constructor this.formHandler = new FormHandler(getBrowser(), configurationReader.getInputSpecification(), configurationReader .getCrawlSpecificationReader().getRandomInputInForms()); this.candidateExtractor = new CandidateElementExtractor( controller.getElementChecker(), this.getBrowser(), formHandler); /** * go back into the previous state. */ try { this.goBackExact(); } catch (Exception e) { LOGGER.error("Failed to backtrack", e); } } /** * Terminate and clean up this Crawler, release the acquired browser. Notice that other Crawlers * might still be active. So this function does NOT shutdown all Crawlers active that should be * done with {@link CrawlerExecutor#shutdown()} */ public void shutdown() { controller.getBrowserPool().freeBrowser(this.getBrowser()); } /** * The main function stated by the ExecutorService. Crawlers add themselves to the list by * calling {@link CrawlQueueManager#addWorkToQueue(Crawler)}. When the ExecutorService finds a * free thread this method is called and when this method ends the Thread is released again and * a new Thread is started * * @see java.util.concurrent.Executors#newFixedThreadPool(int) * @see java.util.concurrent.ExecutorService {@inheritDoc} */ @Override public void run() { if (!checkConstraints()) { // Constrains are not met at start of this Crawler, so stop immediately return; } if (exactEventPath.size() > 0) { try { if (!exactEventPath.get(exactEventPath.size() - 1).getTargetStateVertix() .startWorking(this)) { LOGGER.warn("BAH!"); return; } } catch (CrawljaxException e) { LOGGER.error("Received Crawljax exception", e); } } /** * Init the Crawler */ this.init(); try { /** * Hand over the main crawling */ if (!this.crawl()) { controller.terminate(); } /** * Crawling is done; so the crawlPath of the current branch is known */ if (fired) { controller.getSession().addCrawlPath(crawlPath); } } catch (Exception e) { LOGGER.error("Crawl failed!", e); } finally { /** * At last failure or non shutdown the Crawler. */ this.shutdown(); } } /** * Return the browser used in this Crawler Thread. 
* * @return the browser used in this Crawler Thread */ public EmbeddedBrowser getBrowser() { return browser; } @Override public String toString() { return this.name; } /** * @return the state machine. */ public StateMachine getStateMachine() { return stateMachine; } /** * Test to see if the (new) DOM is changed with regards to the old DOM. This method is Thread * safe. * * @param stateBefore * the state before the event. * @param stateAfter * the state after the event. * @return true if the state is changed according to the compare method of the oracle. */ private boolean isDomChanged(final StateVertix stateBefore, final StateVertix stateAfter) { boolean isChanged = false; // do not need Oracle Comparators now, because hash of stripped dom is // already calculated // isChanged = !stateComparator.compare(stateBefore.getDom(), // stateAfter.getDom(), browser); isChanged = !stateAfter.equals(stateBefore); if (isChanged) { LOGGER.info("Dom is Changed!"); } else { LOGGER.info("Dom Not Changed!"); } return isChanged; } /** * Checks the state and time constraints. This function is nearly Thread-safe. * * @return true if all conditions are met. */ private boolean checkConstraints() { long timePassed = System.currentTimeMillis() - controller.getSession().getStartTime(); int maxCrawlTime = configurationReader.getCrawlSpecificationReader().getMaximumRunTime(); if ((maxCrawlTime != 0) && (timePassed > maxCrawlTime * ONE_SECOND)) { LOGGER.info("Max time " + maxCrawlTime + " seconds passed!"); /* stop crawling */ return false; } StateFlowGraph graph = controller.getSession().getStateFlowGraph(); int maxNumberOfStates = configurationReader.getCrawlSpecificationReader().getMaxNumberOfStates(); if ((maxNumberOfStates != 0) && (graph.getAllStates().size() >= maxNumberOfStates)) { LOGGER.info("Max number of states " + maxNumberOfStates + " reached!"); /* stop crawling */ return false; } /* continue crawling */ return true; } }
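A hedged sketch of how a follow-up Crawler is created and queued, mirroring what spawnThreads does in the class above. The controller, queue manager, and event path are assumed to come from the running crawl session; the helper class and crawler name are illustrative.

package com.crawljax.core;

import java.util.ArrayList;
import java.util.List;

import com.crawljax.core.state.Eventable;

// Illustrative only: shows the public construction/queueing pattern used by
// spawnThreads above. A real CrawljaxController and event path are required.
public class CrawlerSpawnSketch {

    public static void queueFollowUpCrawler(CrawljaxController controller,
                                            CrawlQueueManager crawlQueueManager,
                                            List<Eventable> eventPathToState) {
        // The new crawler will backtrack along eventPathToState in init()/goBackExact()
        // before continuing the crawl from that state.
        Crawler followUp = new Crawler(controller, new ArrayList<Eventable>(eventPathToState), "follow-up");
        crawlQueueManager.addWorkToQueue(followUp);
    }
}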
package com.gildedrose; class GildedRose { private static final int QUALITY_FLOOR = 0; private static final int QUALITY_CEILING = 50; private static final String AGED_BRIE = "Aged Brie"; private static final String BACKSTAGE_PASSES_TO_A_TAFKAL80ETC_CONCERT = "Backstage passes to a TAFKAL80ETC concert"; private static final String SULFURAS_HAND_OF_RAGNAROS = "Sulfuras, Hand of Ragnaros"; Item[] items; public GildedRose(Item[] items) { this.items = items; for (Item item : items) { item.quality = (item.quality < 0) ? 0 : item.quality; } } public void updateQuality() { for (Item item : items) { updateItem(item); } } private void updateItem(Item item) { if (isAgedBrie(item) || isBackstagePass(item)) { incrementQuality(item); if (isBackstagePass(item)) { if (item.sellIn < 11) { incrementQuality(item); } if (item.sellIn < 6) { incrementQuality(item); } } } else { decrementQuality(item); } if (!isSulfurasHandOfRagnaros(item)) { decrementDaysRemainingToSell(item); } if (item.sellIn < 0) { if (!isAgedBrie(item)) { if (!isBackstagePass(item)) { decrementQuality(item); } else { makeWorthless(item); } } else { incrementQuality(item); } } } private void decrementDaysRemainingToSell(Item item) { item.sellIn = item.sellIn - 1; } private boolean isSulfurasHandOfRagnaros(Item item) { return item.name.equals(SULFURAS_HAND_OF_RAGNAROS); } private boolean isAgedBrie(Item item) { return item.name.equals(AGED_BRIE); } private boolean isBackstagePass(Item item) { return item.name.equals(BACKSTAGE_PASSES_TO_A_TAFKAL80ETC_CONCERT); } private void makeWorthless(Item item) { item.quality = item.quality - item.quality; } private void incrementQuality(Item item) { item.quality = Math.min(QUALITY_CEILING, item.quality + 1); } private void decrementQuality(Item item) { if (!isSulfurasHandOfRagnaros(item)) { item.quality = Math.max(QUALITY_FLOOR, item.quality - 1); } } }
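A short usage example for the GildedRose rules above. It assumes the conventional Gilded Rose kata Item class (an (name, sellIn, quality) constructor with public fields), which is not shown in this file.

package com.gildedrose;

// Assumes the standard kata Item class: public Item(String name, int sellIn, int quality)
// with public name/sellIn/quality fields.
public class GildedRoseExample {

    public static void main(String[] args) {
        Item[] items = new Item[] {
                new Item("Aged Brie", 2, 0),                                  // gains quality as it ages
                new Item("Backstage passes to a TAFKAL80ETC concert", 5, 20), // +2 under 11 days, +3 under 6, worthless after the concert
                new Item("Sulfuras, Hand of Ragnaros", 0, 80),                // never sold, never degrades
                new Item("Elixir of the Mongoose", 5, 7)                      // ordinary item, degrades by 1 (2 after sell-by)
        };

        GildedRose shop = new GildedRose(items);
        for (int day = 1; day <= 3; day++) {
            shop.updateQuality();
            for (Item item : items) {
                System.out.println("day " + day + ": " + item.name
                        + " sellIn=" + item.sellIn + " quality=" + item.quality);
            }
        }
    }
}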
package com.hunantv.fw; import java.io.InputStream; import java.util.HashMap; import java.util.List; import java.util.Map; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.servlet.http.Part; import com.hunantv.fw.utils.StringUtil; import com.hunantv.fw.utils.WebUtil; import com.hunantv.fw.view.HtmlView; import com.hunantv.fw.view.JsonPView; import com.hunantv.fw.view.JsonView; import com.hunantv.fw.view.RedirectView; import com.hunantv.fw.view.StringView; import com.hunantv.fw.view.View; public class Controller { protected HttpServletRequest request; protected HttpServletResponse response; protected Map<String, String> partParams = new HashMap<String, String>(); public HttpServletRequest getRequest() { return request; } public void setRequest(HttpServletRequest request) { this.request = request; } public HttpServletResponse getResponse() { return response; } public void setResponse(HttpServletResponse response) { this.response = response; } public View renderHtml(String htmlPath) { return new HtmlView(htmlPath, new HashMap<String, Object>()); } public View renderHtml(String htmlPath, Map<String, Object> data) { return new HtmlView(htmlPath, data); } public View renderString(String str) { return new StringView(str); } public View renderJson(Object object) { return new JsonView(object); } public View renderJsonP(Object object) { String callback = this.getStrNormalParam("callback", "JQuery_").trim(); return new JsonPView(callback, object); } public View renderJsonOrJsonP(Object object) { String callback = this.getStrNormalParam("callback", "").trim(); if (callback.length() == 0) // json return new JsonView(object); return new JsonPView(callback, object); // jsonp } public View redirect(String str) { return new RedirectView(str); } // get string params ////////////////////////// public String getStrParam(String name) { return getStrParam(name, null); } public String getStrParam(String name, String defaultValue) { if (WebUtil.isMultipart(request)) { String value = this.getStrPartParam(name); if (null != value) return value; } return this.getStrNormalParam(name, defaultValue); } public String getStrNormalParam(String name) { return this.getStrNormalParam(name, null); } public String getStrNormalParam(String name, String defaultValue) { String value = this.request.getParameter(name); return value == null ? defaultValue : value; } public String getStrPartParam(String name) { return getStrPartParam(name, null); } public String getStrPartParam(String name, String defaultValue) { String value = this.getPartParam(name); return value == null ? 
defaultValue : value; } // get Integer params ///////////////////////// public Integer getIntegerParam(String name) { return getIntegerParam(name, null); } public Integer getIntegerParam(String name, Integer defaultValue) { if (WebUtil.isMultipart(request)) { Integer value = this.getIntegerPartParam(name); if (null != value) return value; } return this.getIntegerNormalParam(name, defaultValue); } public Integer getIntegerNormalParam(String name) { return getIntegerNormalParam(name, null); } public Integer getIntegerNormalParam(String name, Integer defaultValue) { String value = this.request.getParameter(name); return StringUtil.str2Integer(value, defaultValue); } public Integer getIntegerPartParam(String name) { return getIntegerPartParam(name, null); } public Integer getIntegerPartParam(String name, Integer defaultValue) { String value = getPartParam(name); return StringUtil.str2Integer(value, defaultValue); } // get Long params //////////////////////////// public Long getLongParam(String name) { return getLongParam(name, null); } public Long getLongParam(String name, Long defaultValue) { if (WebUtil.isMultipart(request)) { Long value = this.getLongPartParam(name); if (null != value) return value; } return this.getLongNormalParam(name, defaultValue); } public Long getLongNormalParam(String name) { return getLongNormalParam(name, null); } public Long getLongNormalParam(String name, Long defaultValue) { String value = this.request.getParameter(name); return StringUtil.str2Long(value, defaultValue); } public Long getLongPartParam(String name) { return getLongPartParam(name, null); } public Long getLongPartParam(String name, Long defaultValue) { String value = getPartParam(name); return StringUtil.str2Long(value, defaultValue); } // get Float params /////////////////////////// public Float getFloatParam(String name) { return getFloatParam(name, null); } public Float getFloatParam(String name, Float defaultValue) { if (WebUtil.isMultipart(request)) { Float value = this.getFloatPartParam(name); if (null != value) return value; } return this.getFloatNormalParam(name, defaultValue); } public Float getFloatNormalParam(String name) { return getFloatNormalParam(name, null); } public Float getFloatNormalParam(String name, Float defaultValue) { String value = this.request.getParameter(name); return StringUtil.str2Float(value, defaultValue); } public Float getFloatPartParam(String name) { return getFloatPartParam(name, null); } public Float getFloatPartParam(String name, Float defaultValue) { String value = getPartParam(name); return StringUtil.str2Float(value, defaultValue); } // get Double params ////////////////////////// public Double getDoubleParam(String name) { return getDoubleParam(name, null); } public Double getDoubleParam(String name, Double defaultValue) { if (WebUtil.isMultipart(request)) { Double value = this.getDoublePartParam(name); if (null != value) return value; } return this.getDoubleNormalParam(name, defaultValue); } public Double getDoubleNormalParam(String name) { return getDoubleNormalParam(name, null); } public Double getDoubleNormalParam(String name, Double defaultValue) { String value = this.request.getParameter(name); return StringUtil.str2Double(value, defaultValue); } public Double getDoublePartParam(String name) { return getDoublePartParam(name, null); } public Double getDoublePartParam(String name, Double defaultValue) { String value = getPartParam(name); return StringUtil.str2Double(value, defaultValue); } // get List params //////////////////////////// public List getListParam(String name) { return getListParam(name,
null); } public List getListParam(String name, List defaultValue) { if (WebUtil.isMultipart(request)) { List value = this.getListPartParam(name); if (null != value) return value; } return this.getListNormalParam(name, defaultValue); } public List getListNormalParam(String name) { return this.getListNormalParam(name, null); } public List getListNormalParam(String name, List defaultValue) { String value = this.request.getParameter(name); return StringUtil.str2List(value, defaultValue); } public List getListPartParam(String name) { return this.getListPartParam(name, null); } public List getListPartParam(String name, List defaultValue) { String value = this.getPartParam(name); return StringUtil.str2List(value, defaultValue); } // get Array params /////////////////////////// public String[] getArrayParam(String name) { return getArrayParam(name, null); } public String[] getArrayParam(String name, String[] defaultValue) { if (WebUtil.isMultipart(request)) { String[] value = this.getArrayPartParam(name); if (null != value) return value; } return this.getArrayNormalParam(name, defaultValue); } public String[] getArrayNormalParam(String name) { return this.getArrayNormalParam(name, null); } public String[] getArrayNormalParam(String name, String[] defaultValue) { String value = this.request.getParameter(name); return StringUtil.str2Array(value, defaultValue); } public String[] getArrayPartParam(String name) { return this.getArrayPartParam(name, null); } public String[] getArrayPartParam(String name, String[] defaultValue) { String value = this.getPartParam(name); return StringUtil.str2Array(value, defaultValue); } protected String getPartParam(String name) { try { if (!partParams.containsKey(name)) { Part part = this.request.getPart(name); if (part == null) return null; InputStream in = part.getInputStream(); // Buffer the raw bytes and decode once at the end, so multi-byte characters spanning a read boundary are not corrupted java.io.ByteArrayOutputStream buffer = new java.io.ByteArrayOutputStream(); byte[] bytes = new byte[20 << 10]; int len = -1; while (-1 != (len = in.read(bytes))) { buffer.write(bytes, 0, len); } partParams.put(name, buffer.toString()); } return partParams.get(name); } catch (Exception ex) { throw new RuntimeException(ex); } } }
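A hedged sketch of a concrete controller built on the helpers above. The framework's routing and View rendering are assumed to work as the base class implies; the subclass, action, and parameter names are illustrative.

package com.hunantv.fw;

import java.util.HashMap;
import java.util.Map;

import com.hunantv.fw.view.View;

// Illustrative subclass only: shows how the typed parameter helpers and the
// render*() factory methods of Controller are meant to be combined.
public class UserQueryController extends Controller {

    public View detail() {
        // Each helper falls back to the given default when the parameter is missing or unparsable,
        // and transparently reads multipart "part" parameters when the request is multipart.
        Long userId = getLongParam("user_id", 0L);
        Integer pageSize = getIntegerParam("page_size", 20);
        String keyword = getStrParam("keyword", "");

        Map<String, Object> result = new HashMap<String, Object>();
        result.put("userId", userId);
        result.put("pageSize", pageSize);
        result.put("keyword", keyword);

        // Responds with JSON, or JSONP when a "callback" parameter is present.
        return renderJsonOrJsonP(result);
    }
}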
// This file is part of the Kaltura Collaborative Media Suite which allows users // to do with audio, video, and animation what Wiki platfroms allow them to do with // text. // This program is free software: you can redistribute it and/or modify // published by the Free Software Foundation, either version 3 of the // This program is distributed in the hope that it will be useful, // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // @ignore package com.kaltura.client; import com.kaltura.client.utils.request.ConnectionConfiguration; import com.kaltura.client.types.BaseResponseProfile; /** * This class was generated using generate.php * against an XML schema provided by Kaltura. * * MANUAL CHANGES TO THIS CLASS WILL BE OVERWRITTEN. */ @SuppressWarnings("serial") public class Client extends ClientBase { public Client(ConnectionConfiguration config) { super(config); this.setClientTag("java:18-07-21"); this.setApiVersion("14.2.0"); this.clientConfiguration.put("format", 1); // JSON } /** * @param clientTag */ public void setClientTag(String clientTag){ this.clientConfiguration.put("clientTag", clientTag); } /** * @return String */ public String getClientTag(){ if(this.clientConfiguration.containsKey("clientTag")){ return(String) this.clientConfiguration.get("clientTag"); } return null; } /** * @param apiVersion */ public void setApiVersion(String apiVersion){ this.clientConfiguration.put("apiVersion", apiVersion); } /** * @return String */ public String getApiVersion(){ if(this.clientConfiguration.containsKey("apiVersion")){ return(String) this.clientConfiguration.get("apiVersion"); } return null; } /** * @param partnerId Impersonated partner id */ public void setPartnerId(Integer partnerId){ this.requestConfiguration.put("partnerId", partnerId); } /** * Impersonated partner id * * @return Integer */ public Integer getPartnerId(){ if(this.requestConfiguration.containsKey("partnerId")){ return(Integer) this.requestConfiguration.get("partnerId"); } return 0; } /** * @param ks Kaltura API session */ public void setKs(String ks){ this.requestConfiguration.put("ks", ks); } /** * Kaltura API session * * @return String */ public String getKs(){ if(this.requestConfiguration.containsKey("ks")){ return(String) this.requestConfiguration.get("ks"); } return null; } /** * @param sessionId Kaltura API session */ public void setSessionId(String sessionId){ this.requestConfiguration.put("ks", sessionId); } /** * Kaltura API session * * @return String */ public String getSessionId(){ if(this.requestConfiguration.containsKey("ks")){ return(String) this.requestConfiguration.get("ks"); } return null; } /** * @param responseProfile Response profile - this attribute will be automatically unset after every API call. */ public void setResponseProfile(BaseResponseProfile responseProfile){ this.requestConfiguration.put("responseProfile", responseProfile); } /** * Response profile - this attribute will be automatically unset after every API call. * * @return BaseResponseProfile */ public BaseResponseProfile getResponseProfile(){ if(this.requestConfiguration.containsKey("responseProfile")){ return(BaseResponseProfile) this.requestConfiguration.get("responseProfile"); } return null; } }
// This file is part of the Kaltura Collaborative Media Suite which allows users // to do with audio, video, and animation what Wiki platfroms allow them to do with // text. // This program is free software: you can redistribute it and/or modify // published by the Free Software Foundation, either version 3 of the // This program is distributed in the hope that it will be useful, // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // @ignore package com.kaltura.client; import com.kaltura.client.utils.request.ConnectionConfiguration; import com.kaltura.client.types.BaseResponseProfile; /** * This class was generated using generate.php * against an XML schema provided by Kaltura. * * MANUAL CHANGES TO THIS CLASS WILL BE OVERWRITTEN. */ @SuppressWarnings("serial") public class Client extends ClientBase { public Client(ConnectionConfiguration config) { super(config); this.setClientTag("java:20-06-18"); this.setApiVersion("16.5.0"); this.clientConfiguration.put("format", 1); // JSON } /** * @param clientTag */ public void setClientTag(String clientTag){ this.clientConfiguration.put("clientTag", clientTag); } /** * @return String */ public String getClientTag(){ if(this.clientConfiguration.containsKey("clientTag")){ return(String) this.clientConfiguration.get("clientTag"); } return null; } /** * @param apiVersion */ public void setApiVersion(String apiVersion){ this.clientConfiguration.put("apiVersion", apiVersion); } /** * @return String */ public String getApiVersion(){ if(this.clientConfiguration.containsKey("apiVersion")){ return(String) this.clientConfiguration.get("apiVersion"); } return null; } /** * @param partnerId Impersonated partner id */ public void setPartnerId(Integer partnerId){ this.requestConfiguration.put("partnerId", partnerId); } /** * Impersonated partner id * * @return Integer */ public Integer getPartnerId(){ if(this.requestConfiguration.containsKey("partnerId")){ return(Integer) this.requestConfiguration.get("partnerId"); } return 0; } /** * @param ks Kaltura API session */ public void setKs(String ks){ this.requestConfiguration.put("ks", ks); } /** * Kaltura API session * * @return String */ public String getKs(){ if(this.requestConfiguration.containsKey("ks")){ return(String) this.requestConfiguration.get("ks"); } return null; } /** * @param sessionId Kaltura API session */ public void setSessionId(String sessionId){ this.requestConfiguration.put("ks", sessionId); } /** * Kaltura API session * * @return String */ public String getSessionId(){ if(this.requestConfiguration.containsKey("ks")){ return(String) this.requestConfiguration.get("ks"); } return null; } /** * @param responseProfile Response profile - this attribute will be automatically unset after every API call. */ public void setResponseProfile(BaseResponseProfile responseProfile){ this.requestConfiguration.put("responseProfile", responseProfile); } /** * Response profile - this attribute will be automatically unset after every API call. * * @return BaseResponseProfile */ public BaseResponseProfile getResponseProfile(){ if(this.requestConfiguration.containsKey("responseProfile")){ return(BaseResponseProfile) this.requestConfiguration.get("responseProfile"); } return null; } }
// This file is part of the Kaltura Collaborative Media Suite which allows users // to do with audio, video, and animation what Wiki platforms allow them to do with // text. // This program is free software: you can redistribute it and/or modify // published by the Free Software Foundation, either version 3 of the // This program is distributed in the hope that it will be useful, // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // @ignore package com.kaltura.client; import com.kaltura.client.utils.request.ConnectionConfiguration; import com.kaltura.client.types.BaseResponseProfile; /** * This class was generated using generate.php * against an XML schema provided by Kaltura. * * MANUAL CHANGES TO THIS CLASS WILL BE OVERWRITTEN. */ @SuppressWarnings("serial") public class Client extends ClientBase { public Client(ConnectionConfiguration config) { super(config); this.setClientTag("java:21-12-08"); this.setApiVersion("17.14.0"); this.clientConfiguration.put("format", 1); // JSON } /** * @param clientTag */ public void setClientTag(String clientTag){ this.clientConfiguration.put("clientTag", clientTag); } /** * @return String */ public String getClientTag(){ if(this.clientConfiguration.containsKey("clientTag")){ return(String) this.clientConfiguration.get("clientTag"); } return null; } /** * @param apiVersion */ public void setApiVersion(String apiVersion){ this.clientConfiguration.put("apiVersion", apiVersion); } /** * @return String */ public String getApiVersion(){ if(this.clientConfiguration.containsKey("apiVersion")){ return(String) this.clientConfiguration.get("apiVersion"); } return null; } /** * @param partnerId Impersonated partner id */ public void setPartnerId(Integer partnerId){ this.requestConfiguration.put("partnerId", partnerId); } /** * Impersonated partner id * * @return Integer */ public Integer getPartnerId(){ if(this.requestConfiguration.containsKey("partnerId")){ return(Integer) this.requestConfiguration.get("partnerId"); } return 0; } /** * @param ks Kaltura API session */ public void setKs(String ks){ this.requestConfiguration.put("ks", ks); } /** * Kaltura API session * * @return String */ public String getKs(){ if(this.requestConfiguration.containsKey("ks")){ return(String) this.requestConfiguration.get("ks"); } return null; } /** * @param sessionId Kaltura API session */ public void setSessionId(String sessionId){ this.requestConfiguration.put("ks", sessionId); } /** * Kaltura API session * * @return String */ public String getSessionId(){ if(this.requestConfiguration.containsKey("ks")){ return(String) this.requestConfiguration.get("ks"); } return null; } /** * @param responseProfile Response profile - this attribute will be automatically unset after every API call. */ public void setResponseProfile(BaseResponseProfile responseProfile){ this.requestConfiguration.put("responseProfile", responseProfile); } /** * Response profile - this attribute will be automatically unset after every API call. * * @return BaseResponseProfile */ public BaseResponseProfile getResponseProfile(){ if(this.requestConfiguration.containsKey("responseProfile")){ return(BaseResponseProfile) this.requestConfiguration.get("responseProfile"); } return null; } }
// This file is part of the Kaltura Collaborative Media Suite which allows users // to do with audio, video, and animation what Wiki platfroms allow them to do with // text. // This program is free software: you can redistribute it and/or modify // published by the Free Software Foundation, either version 3 of the // This program is distributed in the hope that it will be useful, // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // @ignore package com.kaltura.client; import com.kaltura.client.utils.request.ConnectionConfiguration; import com.kaltura.client.types.BaseResponseProfile; /** * This class was generated using generate.php * against an XML schema provided by Kaltura. * * MANUAL CHANGES TO THIS CLASS WILL BE OVERWRITTEN. */ @SuppressWarnings("serial") public class Client extends ClientBase { public Client(ConnectionConfiguration config) { super(config); this.setClientTag("java:20-07-10"); this.setApiVersion("16.6.0"); this.clientConfiguration.put("format", 1); // JSON } /** * @param clientTag */ public void setClientTag(String clientTag){ this.clientConfiguration.put("clientTag", clientTag); } /** * @return String */ public String getClientTag(){ if(this.clientConfiguration.containsKey("clientTag")){ return(String) this.clientConfiguration.get("clientTag"); } return null; } /** * @param apiVersion */ public void setApiVersion(String apiVersion){ this.clientConfiguration.put("apiVersion", apiVersion); } /** * @return String */ public String getApiVersion(){ if(this.clientConfiguration.containsKey("apiVersion")){ return(String) this.clientConfiguration.get("apiVersion"); } return null; } /** * @param partnerId Impersonated partner id */ public void setPartnerId(Integer partnerId){ this.requestConfiguration.put("partnerId", partnerId); } /** * Impersonated partner id * * @return Integer */ public Integer getPartnerId(){ if(this.requestConfiguration.containsKey("partnerId")){ return(Integer) this.requestConfiguration.get("partnerId"); } return 0; } /** * @param ks Kaltura API session */ public void setKs(String ks){ this.requestConfiguration.put("ks", ks); } /** * Kaltura API session * * @return String */ public String getKs(){ if(this.requestConfiguration.containsKey("ks")){ return(String) this.requestConfiguration.get("ks"); } return null; } /** * @param sessionId Kaltura API session */ public void setSessionId(String sessionId){ this.requestConfiguration.put("ks", sessionId); } /** * Kaltura API session * * @return String */ public String getSessionId(){ if(this.requestConfiguration.containsKey("ks")){ return(String) this.requestConfiguration.get("ks"); } return null; } /** * @param responseProfile Response profile - this attribute will be automatically unset after every API call. */ public void setResponseProfile(BaseResponseProfile responseProfile){ this.requestConfiguration.put("responseProfile", responseProfile); } /** * Response profile - this attribute will be automatically unset after every API call. * * @return BaseResponseProfile */ public BaseResponseProfile getResponseProfile(){ if(this.requestConfiguration.containsKey("responseProfile")){ return(BaseResponseProfile) this.requestConfiguration.get("responseProfile"); } return null; } }
// This file is part of the Kaltura Collaborative Media Suite which allows users // to do with audio, video, and animation what Wiki platforms allow them to do with // text. // This program is free software: you can redistribute it and/or modify // published by the Free Software Foundation, either version 3 of the // This program is distributed in the hope that it will be useful, // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // @ignore package com.kaltura.client; import com.kaltura.client.utils.request.ConnectionConfiguration; import com.kaltura.client.types.BaseResponseProfile; /** * This class was generated using generate.php * against an XML schema provided by Kaltura. * * MANUAL CHANGES TO THIS CLASS WILL BE OVERWRITTEN. */ @SuppressWarnings("serial") public class Client extends ClientBase { public Client(ConnectionConfiguration config) { super(config); this.setClientTag("java:21-12-05"); this.setApiVersion("17.14.0"); this.clientConfiguration.put("format", 1); // JSON } /** * @param clientTag */ public void setClientTag(String clientTag){ this.clientConfiguration.put("clientTag", clientTag); } /** * @return String */ public String getClientTag(){ if(this.clientConfiguration.containsKey("clientTag")){ return(String) this.clientConfiguration.get("clientTag"); } return null; } /** * @param apiVersion */ public void setApiVersion(String apiVersion){ this.clientConfiguration.put("apiVersion", apiVersion); } /** * @return String */ public String getApiVersion(){ if(this.clientConfiguration.containsKey("apiVersion")){ return(String) this.clientConfiguration.get("apiVersion"); } return null; } /** * @param partnerId Impersonated partner id */ public void setPartnerId(Integer partnerId){ this.requestConfiguration.put("partnerId", partnerId); } /** * Impersonated partner id * * @return Integer */ public Integer getPartnerId(){ if(this.requestConfiguration.containsKey("partnerId")){ return(Integer) this.requestConfiguration.get("partnerId"); } return 0; } /** * @param ks Kaltura API session */ public void setKs(String ks){ this.requestConfiguration.put("ks", ks); } /** * Kaltura API session * * @return String */ public String getKs(){ if(this.requestConfiguration.containsKey("ks")){ return(String) this.requestConfiguration.get("ks"); } return null; } /** * @param sessionId Kaltura API session */ public void setSessionId(String sessionId){ this.requestConfiguration.put("ks", sessionId); } /** * Kaltura API session * * @return String */ public String getSessionId(){ if(this.requestConfiguration.containsKey("ks")){ return(String) this.requestConfiguration.get("ks"); } return null; } /** * @param responseProfile Response profile - this attribute will be automatically unset after every API call. */ public void setResponseProfile(BaseResponseProfile responseProfile){ this.requestConfiguration.put("responseProfile", responseProfile); } /** * Response profile - this attribute will be automatically unset after every API call. * * @return BaseResponseProfile */ public BaseResponseProfile getResponseProfile(){ if(this.requestConfiguration.containsKey("responseProfile")){ return(BaseResponseProfile) this.requestConfiguration.get("responseProfile"); } return null; } }
// This file is part of the Kaltura Collaborative Media Suite which allows users // to do with audio, video, and animation what Wiki platfroms allow them to do with // text. // This program is free software: you can redistribute it and/or modify // published by the Free Software Foundation, either version 3 of the // This program is distributed in the hope that it will be useful, // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // @ignore package com.kaltura.client; import com.kaltura.client.utils.request.ConnectionConfiguration; import com.kaltura.client.types.BaseResponseProfile; /** * This class was generated using generate.php * against an XML schema provided by Kaltura. * * MANUAL CHANGES TO THIS CLASS WILL BE OVERWRITTEN. */ @SuppressWarnings("serial") public class Client extends ClientBase { public Client(ConnectionConfiguration config) { super(config); this.setClientTag("java:18-04-15"); this.setApiVersion("3.3.0"); this.clientConfiguration.put("format", 1); // JSON } /** * @param clientTag */ public void setClientTag(String clientTag){ this.clientConfiguration.put("clientTag", clientTag); } /** * @return String */ public String getClientTag(){ if(this.clientConfiguration.containsKey("clientTag")){ return(String) this.clientConfiguration.get("clientTag"); } return null; } /** * @param apiVersion */ public void setApiVersion(String apiVersion){ this.clientConfiguration.put("apiVersion", apiVersion); } /** * @return String */ public String getApiVersion(){ if(this.clientConfiguration.containsKey("apiVersion")){ return(String) this.clientConfiguration.get("apiVersion"); } return null; } /** * @param partnerId Impersonated partner id */ public void setPartnerId(Integer partnerId){ this.requestConfiguration.put("partnerId", partnerId); } /** * Impersonated partner id * * @return Integer */ public Integer getPartnerId(){ if(this.requestConfiguration.containsKey("partnerId")){ return(Integer) this.requestConfiguration.get("partnerId"); } return 0; } /** * @param ks Kaltura API session */ public void setKs(String ks){ this.requestConfiguration.put("ks", ks); } /** * Kaltura API session * * @return String */ public String getKs(){ if(this.requestConfiguration.containsKey("ks")){ return(String) this.requestConfiguration.get("ks"); } return null; } /** * @param sessionId Kaltura API session */ public void setSessionId(String sessionId){ this.requestConfiguration.put("ks", sessionId); } /** * Kaltura API session * * @return String */ public String getSessionId(){ if(this.requestConfiguration.containsKey("ks")){ return(String) this.requestConfiguration.get("ks"); } return null; } /** * @param responseProfile Response profile - this attribute will be automatically unset after every API call. */ public void setResponseProfile(BaseResponseProfile responseProfile){ this.requestConfiguration.put("responseProfile", responseProfile); } /** * Response profile - this attribute will be automatically unset after every API call. * * @return BaseResponseProfile */ public BaseResponseProfile getResponseProfile(){ if(this.requestConfiguration.containsKey("responseProfile")){ return(BaseResponseProfile) this.requestConfiguration.get("responseProfile"); } return null; } }
// This file is part of the Kaltura Collaborative Media Suite which allows users // to do with audio, video, and animation what Wiki platfroms allow them to do with // text. // This program is free software: you can redistribute it and/or modify // published by the Free Software Foundation, either version 3 of the // This program is distributed in the hope that it will be useful, // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // @ignore package com.kaltura.client; import com.kaltura.client.utils.request.ConnectionConfiguration; import com.kaltura.client.types.BaseResponseProfile; /** * This class was generated using generate.php * against an XML schema provided by Kaltura. * * MANUAL CHANGES TO THIS CLASS WILL BE OVERWRITTEN. */ @SuppressWarnings("serial") public class Client extends ClientBase { public Client(ConnectionConfiguration config) { super(config); this.setClientTag("java:18-09-03"); this.setApiVersion("14.6.0"); this.clientConfiguration.put("format", 1); // JSON } /** * @param clientTag */ public void setClientTag(String clientTag){ this.clientConfiguration.put("clientTag", clientTag); } /** * @return String */ public String getClientTag(){ if(this.clientConfiguration.containsKey("clientTag")){ return(String) this.clientConfiguration.get("clientTag"); } return null; } /** * @param apiVersion */ public void setApiVersion(String apiVersion){ this.clientConfiguration.put("apiVersion", apiVersion); } /** * @return String */ public String getApiVersion(){ if(this.clientConfiguration.containsKey("apiVersion")){ return(String) this.clientConfiguration.get("apiVersion"); } return null; } /** * @param partnerId Impersonated partner id */ public void setPartnerId(Integer partnerId){ this.requestConfiguration.put("partnerId", partnerId); } /** * Impersonated partner id * * @return Integer */ public Integer getPartnerId(){ if(this.requestConfiguration.containsKey("partnerId")){ return(Integer) this.requestConfiguration.get("partnerId"); } return 0; } /** * @param ks Kaltura API session */ public void setKs(String ks){ this.requestConfiguration.put("ks", ks); } /** * Kaltura API session * * @return String */ public String getKs(){ if(this.requestConfiguration.containsKey("ks")){ return(String) this.requestConfiguration.get("ks"); } return null; } /** * @param sessionId Kaltura API session */ public void setSessionId(String sessionId){ this.requestConfiguration.put("ks", sessionId); } /** * Kaltura API session * * @return String */ public String getSessionId(){ if(this.requestConfiguration.containsKey("ks")){ return(String) this.requestConfiguration.get("ks"); } return null; } /** * @param responseProfile Response profile - this attribute will be automatically unset after every API call. */ public void setResponseProfile(BaseResponseProfile responseProfile){ this.requestConfiguration.put("responseProfile", responseProfile); } /** * Response profile - this attribute will be automatically unset after every API call. * * @return BaseResponseProfile */ public BaseResponseProfile getResponseProfile(){ if(this.requestConfiguration.containsKey("responseProfile")){ return(BaseResponseProfile) this.requestConfiguration.get("responseProfile"); } return null; } }
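A brief usage sketch for the generated Client above. It relies only on the constructor and setters shown; the ConnectionConfiguration, partner id and ks values are assumed to be supplied by the caller and are placeholders, not real credentials.

package com.kaltura.client;

import com.kaltura.client.utils.request.ConnectionConfiguration;

// Hypothetical helper, not generated code: wires a Client with a session.
public final class ClientSetupSketch {

    private ClientSetupSketch() {}

    public static Client newClient(ConnectionConfiguration config, int partnerId, String ks) {
        Client client = new Client(config); // constructor stamps clientTag, apiVersion and JSON format
        client.setPartnerId(partnerId);     // impersonated partner id
        client.setKs(ks);                   // Kaltura API session token
        return client;
    }
}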
package com.ninty.runtime; public class NiFrame { NiFrame prevFrame; LocalVars localVars; OperandStack operandStack; public NiFrame(int localVarsSize, int operandStackSize){ localVars = new LocalVars(localVarsSize); operandStack = new OperandStack(operandStackSize); } }
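NiFrame keeps a package-private prevFrame link but never assigns it, which suggests frames are chained into a call stack elsewhere in the runtime. A hypothetical sketch of such a stack follows; the class and method names are assumptions, not part of the project.

package com.ninty.runtime;

// Hypothetical: a singly linked frame stack built on NiFrame.prevFrame.
class FrameStackSketch {

    private NiFrame top;

    void push(NiFrame frame) {
        frame.prevFrame = top; // link the new frame to its caller
        top = frame;
    }

    NiFrame pop() {
        NiFrame frame = top;
        top = frame.prevFrame;
        frame.prevFrame = null; // detach so the popped frame can be collected
        return frame;
    }
}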
package com.podio; import java.net.URI; import java.net.URISyntaxException; import java.util.Locale; import java.util.TimeZone; import org.codehaus.jackson.jaxrs.JacksonJsonProvider; import org.codehaus.jackson.map.DeserializationConfig.Feature; import org.codehaus.jackson.map.ObjectMapper; import org.codehaus.jackson.map.SerializationConfig; import org.codehaus.jackson.map.annotate.JsonSerialize.Inclusion; import org.codehaus.jackson.map.deser.CustomDeserializerFactory; import org.codehaus.jackson.map.deser.StdDeserializerProvider; import org.codehaus.jackson.map.ser.CustomSerializerFactory; import org.eclipse.jetty.http.HttpHeaders; import org.joda.time.DateTime; import org.joda.time.LocalDate; import com.podio.oauth.OAuthClientCredentials; import com.podio.oauth.OAuthUserCredentials; import com.podio.serialize.DateTimeDeserializer; import com.podio.serialize.DateTimeSerializer; import com.podio.serialize.LocalDateDeserializer; import com.podio.serialize.LocalDateSerializer; import com.podio.serialize.LocaleDeserializer; import com.podio.serialize.LocaleSerializer; import com.podio.serialize.TimeZoneDeserializer; import com.podio.serialize.TimeZoneSerializer; import com.sun.jersey.api.client.Client; import com.sun.jersey.api.client.WebResource; import com.sun.jersey.api.client.config.ClientConfig; import com.sun.jersey.api.client.config.DefaultClientConfig; import com.sun.jersey.api.client.filter.GZIPContentEncodingFilter; import com.sun.jersey.multipart.impl.MultiPartWriter; /** * This is the main low level entry point to access the Podio API. Construct * this and pass it to the APIFactory. */ public final class ResourceFactory { private final WebResource apiResource; private final WebResource fileResource; private final LoginFilter loginFilter; public ResourceFactory(OAuthClientCredentials clientCredentials, OAuthUserCredentials userCredentials) { this("api.podio.com", "files.podio.com", 443, true, false, clientCredentials, userCredentials); } public ResourceFactory(String apiHostname, String fileHostname, int port, boolean ssl, boolean dryRun, OAuthClientCredentials clientCredentials, OAuthUserCredentials userCredentials) { ClientConfig config = new DefaultClientConfig(); config.getSingletons().add(getJsonProvider()); config.getClasses().add(MultiPartWriter.class); Client client = Client.create(config); client.addFilter(new GZIPContentEncodingFilter(false)); client.addFilter(new ExceptionFilter()); if (dryRun) { client.addFilter(new DryRunFilter()); } // client.addFilter(new LoggingFilter()); this.apiResource = client.resource(getURI(apiHostname, port, ssl)); apiResource.header(HttpHeaders.USER_AGENT, "Podio Java API Client"); this.fileResource = client.resource(getURI(fileHostname, port, ssl)); fileResource.header(HttpHeaders.USER_AGENT, "Podio Java API Client"); AuthProvider authProvider = new AuthProvider(this, clientCredentials, userCredentials); this.loginFilter = new LoginFilter(authProvider); } private URI getURI(String hostname, int port, boolean ssl) { try { return new URI(ssl ? 
"https" : "http", null, hostname, port, null, null, null); } catch (URISyntaxException e) { throw new RuntimeException(e); } } private JacksonJsonProvider getJsonProvider() { ObjectMapper mapper = new ObjectMapper(); mapper.disable(Feature.FAIL_ON_UNKNOWN_PROPERTIES); mapper.disable(SerializationConfig.Feature.FAIL_ON_EMPTY_BEANS); mapper.setSerializationInclusion(Inclusion.NON_NULL); CustomSerializerFactory serializerFactory = new CustomSerializerFactory(); serializerFactory.addSpecificMapping(DateTime.class, new DateTimeSerializer()); serializerFactory.addSpecificMapping(LocalDate.class, new LocalDateSerializer()); serializerFactory.addGenericMapping(TimeZone.class, new TimeZoneSerializer()); serializerFactory.addSpecificMapping(Locale.class, new LocaleSerializer()); mapper.setSerializerFactory(serializerFactory); CustomDeserializerFactory deserializerFactory = new CustomDeserializerFactory(); deserializerFactory.addSpecificMapping(DateTime.class, new DateTimeDeserializer()); deserializerFactory.addSpecificMapping(LocalDate.class, new LocalDateDeserializer()); deserializerFactory.addSpecificMapping(TimeZone.class, new TimeZoneDeserializer()); deserializerFactory.addSpecificMapping(Locale.class, new LocaleDeserializer()); mapper.setDeserializerProvider(new StdDeserializerProvider( deserializerFactory)); return new CustomJacksonJsonProvider(mapper); } public WebResource getFileResource(String path) { return getFileResource(path, true); } public WebResource getFileResource(String path, boolean secure) { WebResource subResource = fileResource.path(path); if (secure) { subResource.addFilter(this.loginFilter); } return subResource; } public WebResource getApiResource(String path) { return getApiResource(path, true); } public WebResource getApiResource(String path, boolean secure) { WebResource subResource = apiResource.path(path); if (secure) { subResource.addFilter(this.loginFilter); } return subResource; } }
package com.stripe.model; import com.stripe.exception.APIConnectionException; import com.stripe.exception.APIException; import com.stripe.exception.AuthenticationException; import com.stripe.exception.CardException; import com.stripe.exception.InvalidRequestException; import com.stripe.net.APIResource; import com.stripe.net.RequestOptions; import java.util.Map; public class Reversal extends APIResource implements MetadataStore<Transfer> { Integer amount; String currency; Long created; String balanceTransaction; String id; String transfer; Map<String, String> metadata; public Reversal update(Map<String, Object> params) throws AuthenticationException, InvalidRequestException, APIConnectionException, CardException, APIException { return update(params, (RequestOptions) null); } @Deprecated public Reversal update(Map<String, Object> params, String apiKey) throws AuthenticationException, InvalidRequestException, APIConnectionException, CardException, APIException { return update(params, RequestOptions.builder().setApiKey(apiKey).build()); } public Reversal update(Map<String, Object> params, RequestOptions options) throws AuthenticationException, InvalidRequestException, APIConnectionException, CardException, APIException { return request(RequestMethod.POST, this.getInstanceURL(), params, Reversal.class, options); } public String getInstanceURL() { if (this.transfer != null) { return String.format("%s/%s/reversals/%s", classURL(Transfer.class), this.transfer, this.getId()); } return null; } public String getId() { return id; } public Integer getAmount() { return amount; } public void setAmount(Integer amount) { this.amount = amount; } public String getCurrency() { return currency; } public void setCurrency(String currency) { this.currency = currency; } public Long getCreated() { return created; } public void setCreated(Long created) { this.created = created; } public String getBalanceTransaction() { return balanceTransaction; } public void setBalanceTransaction(String balanceTransaction) { this.balanceTransaction = balanceTransaction; } public String getTransfer() { return transfer; } public void setTransfer(String transfer) { this.transfer = transfer; } public Map<String, String> getMetadata() { return metadata; } public void setMetadata(Map<String, String> metadata) { this.metadata = metadata; } }
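A short usage sketch for Reversal.update(): the reversal instance is assumed to have been fetched elsewhere (for example from a transfer's reversal collection), and the API key and metadata values are placeholders.

package com.stripe.model;

import com.stripe.net.RequestOptions;
import java.util.HashMap;
import java.util.Map;

// Hypothetical example, not part of the library.
public final class ReversalUpdateExample {

    private ReversalUpdateExample() {}

    public static Reversal tagReversal(Reversal reversal) throws Exception {
        Map<String, String> metadata = new HashMap<>();
        metadata.put("ticket", "T-1234"); // placeholder bookkeeping reference

        Map<String, Object> params = new HashMap<>();
        params.put("metadata", metadata);

        RequestOptions options = RequestOptions.builder()
                .setApiKey("sk_test_placeholder") // placeholder key
                .build();

        // POSTs to /v1/transfers/{transfer}/reversals/{id}, per getInstanceURL() above.
        return reversal.update(params, options);
    }
}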
package controllers; import com.google.gson.JsonObject; import org.jets3t.service.S3Service; import org.jets3t.service.S3ServiceException; import org.jets3t.service.impl.rest.httpclient.RestS3Service; import org.jets3t.service.model.S3Bucket; import org.jets3t.service.model.S3Object; import org.jets3t.service.security.AWSCredentials; import spark.Request; import spark.Response; import javax.servlet.MultipartConfigElement; import javax.servlet.http.Part; import java.io.*; import java.math.BigInteger; import java.security.NoSuchAlgorithmException; import java.security.SecureRandom; import java.util.Collection; import java.util.Map; public class ApiController { static String awsAccessKey = System.getenv().get("AWS_ACCESS_KEY"); static String awsSecretKey = System.getenv().get("AWS_SECRET_KEY"); private static final String ROOT_URL = "http://d1edk0932xwypd.cloudfront.net/"; public static final String MEEP_SERVICE_URL = "http://54.232.209.214:4567/meeps"; private static SecureRandom random = new SecureRandom(); private static S3Bucket picturesBucket; private static S3Service service; public static void init(){ try { AWSCredentials awsCredentials = new AWSCredentials(awsAccessKey, awsSecretKey); service = new RestS3Service(awsCredentials); picturesBucket = service.getBucket("wemeep-pictures"); } catch (S3ServiceException e){ System.out.println(e.toString()); } } public static String getRandomString() { return new BigInteger(130, random).toString(32); } public static Response getProfilePicture(Response response, Request request){ JsonObject ret = new JsonObject(); try { DBController controller = new DBController(); String id = request.params(":id"); if(id == null) throw new Exception("Missing user id"); String fileName = controller.getUserPicture(id); if(fileName == null) throw new Exception("User not found"); ret.addProperty("url", ROOT_URL + fileName); } catch (Exception e){ ret.addProperty("Error", e.getMessage()); } finally { response.body(ret.toString()); return response; } } public static Response getCommentPicture(Response response, Request request){ JsonObject ret = new JsonObject(); try { DBController controller = new DBController(); String id = request.params(":id"); if(id == null) throw new Exception("Missing comment id"); String fileName = controller.getCommentPicture(id); if(fileName == null) throw new Exception("Comment not found"); ret.addProperty("url", ROOT_URL + fileName); } catch (Exception e){ ret.addProperty("Error", e.getMessage()); } finally { response.body(ret.toString()); return response; } } /** * Saves a file to a temp file, uploads it to S3 bucket, deletes the temp file and returns the path. 
* @param response * @param request * @return */ public static Response postProfilePicture(Response response, Request request){ JsonObject ret = new JsonObject(); OutputStream outputStream = null; String id = request.params(":id"); File auxFile = null; if(id == null){ ret.addProperty("Error", "Missing user id"); response.body(ret.getAsString()); return response; } try { MultipartConfigElement multipartConfigElement = new MultipartConfigElement("/temp"); request.raw().setAttribute("org.eclipse.jetty.multipartConfig",multipartConfigElement); Collection<Part> files = request.raw().getParts(); if(files.size() == 0 || files.size() > 1){ throw new Exception("No files or more than 1 file detected"); } Part p = (Part) files.toArray()[0]; if(!p.getName().equals("picture")){ throw new Exception("File must be called picture"); } String extensionRemoved; try { String[] auxDotParts = p.getSubmittedFileName().split("\\."); extensionRemoved = auxDotParts[auxDotParts.length - 1]; } catch (Exception e){ throw new Exception("File must contain extension."); } String fileName = (getRandomString() + "_" + p.getSubmittedFileName()).replaceAll("[^A-Za-z0-9 ]", "") + "." + extensionRemoved; String tempFile = "/" + fileName; //String tempFile = "/Users/santiagomarti/Desktop/" + fileName; InputStream inputStream = p.getInputStream(); auxFile = new File(tempFile); auxFile.createNewFile(); outputStream = new FileOutputStream(auxFile); byte[] auxBytes = new byte[1024]; int read; while((read = inputStream.read(auxBytes)) != -1) { outputStream.write(auxBytes, 0, read); } outputStream.close(); inputStream.close(); if(auxFile.length() > 800 * 1000) throw new Exception("File must be lighter than 800kB"); DBController controller = new DBController(); String existentName = controller.getUserPicture(id); if(existentName != null) service.deleteObject(picturesBucket, existentName); S3Object object = new S3Object(auxFile); service.putObject(picturesBucket, object); ret.addProperty("Success", true); ret.addProperty("url", ROOT_URL + fileName); controller.upsertProfilePicture(fileName, id); } catch (Exception e2){ System.out.println(e2.toString()); ret.addProperty("Error", e2.getMessage()); } finally { response.body(ret.toString()); if(auxFile != null) auxFile.delete(); return response; } } /** * Saves a file to a temp file, uploads it to S3 bucket, deletes the temp file and returns the path. 
* @param response * @param request * @return */ public synchronized static Response postCommentPicture(Response response, Request request){ JsonObject ret = new JsonObject(); OutputStream outputStream; String meepId = request.params(":id"); System.out.println("1"); if(meepId == null){ ret.addProperty("Error", "Missing meep id"); response.body(ret.getAsString()); return response; } try { System.out.println("2"); //Chequeamos que vengan los datos del sender Map<String, String> urlData = Utils.splitQuery(request.queryString()); if(!urlData.containsKey("senderName") || !urlData.containsKey("senderId")) throw new Exception("senderName or senderId missing"); System.out.println("3"); MultipartConfigElement multipartConfigElement = new MultipartConfigElement("/temp"); System.out.println("3.1"); request.raw().setAttribute("org.eclipse.jetty.multipartConfig", multipartConfigElement); System.out.println("3.2"); Collection<Part> files = request.raw().getParts(); System.out.println("3.3"); if(files.size() == 0 || files.size() > 1){ throw new Exception("No files or more than 1 file detected"); } System.out.println("4"); Part p = (Part) files.toArray()[0]; if(!p.getName().equals("picture")){ throw new Exception("File must be called picture"); } System.out.println("5"); String extensionRemoved; try { String[] auxDotParts = p.getSubmittedFileName().split("\\."); extensionRemoved = auxDotParts[auxDotParts.length - 1]; } catch (Exception e){ throw new Exception("File must contain extension."); } System.out.println("6"); String fileName = (getRandomString() + "_" + p.getSubmittedFileName()).replaceAll("[^A-Za-z0-9 ]", "") + "." + extensionRemoved; String tempFile = "/" + fileName; //String tempFile = "/Users/santiagomarti/Desktop/" + fileName; InputStream inputStream = p.getInputStream(); final File auxFile = new File(tempFile); auxFile.createNewFile(); outputStream = new FileOutputStream(tempFile); int read = 0; byte[] bytes = new byte[1024]; System.out.println("6"); while ((read = inputStream.read(bytes)) != -1) { outputStream.write(bytes, 0, read); } System.out.println("7"); outputStream.close(); inputStream.close(); if(auxFile.length() > 1500 * 1000) throw new Exception("File must be lighter than 1500kB"); System.out.println("8"); //Creamos el comentario en meep service CommentController commentController = new CommentController(); String commentId = commentController.postNewComment(meepId, urlData.get("senderName"), urlData.get("senderId"), fileName); System.out.println("9"); ret.addProperty("Success", true); ret.addProperty("url", ROOT_URL + fileName); ret.addProperty("id", commentId); System.out.println("10"); new Thread(() -> { try { //Guardamos la info de la imagen en DB local DBController controller = new DBController(); String existentName = controller.getCommentPicture(commentId); if (existentName != null) service.deleteObject(picturesBucket, existentName); S3Object object = new S3Object(auxFile); service.putObject(picturesBucket, object); controller.upsertCommentPicture(fileName, commentId); } catch (S3ServiceException | NoSuchAlgorithmException | IOException e){ System.out.println(e.getMessage()); } finally { if(auxFile != null) auxFile.delete(); } }).start(); } catch (Exception e2){ System.out.println(e2.getMessage()); ret.addProperty("Error", e2.getMessage()); ret.addProperty("Error2", e2.toString()); for(int i = 0; i < e2.getStackTrace().length; i++) ret.addProperty("Error: " + i, e2.getStackTrace()[i].toString()); } finally { System.out.println("11"); response.body(ret.toString()); return 
response; } } }
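A hypothetical Spark route wiring for the handlers above; the URL patterns are illustrative, not taken from the project. The controller methods take (Response, Request) in that order and return the same Response whose body was already set, so each route simply forwards that body. One caveat in the handlers themselves: the early-return branches call ret.getAsString() on a Gson JsonObject, which is unsupported for object elements; the ret.toString() form used everywhere else is the call that actually serializes it.

package controllers;

import static spark.Spark.get;
import static spark.Spark.post;

// Hypothetical wiring, not part of the project.
public class ApiRoutes {

    public static void main(String[] args) {
        ApiController.init(); // must run once so the S3 service and pictures bucket are ready

        get("/users/:id/picture", (req, res) -> ApiController.getProfilePicture(res, req).body());
        get("/comments/:id/picture", (req, res) -> ApiController.getCommentPicture(res, req).body());
        post("/users/:id/picture", (req, res) -> ApiController.postProfilePicture(res, req).body());
        post("/meeps/:id/picture", (req, res) -> ApiController.postCommentPicture(res, req).body());
    }
}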
package flora.core.gui; import flora.core.ConstantsFLORA; import flora.core.block.TileInfuser; import net.minecraft.client.gui.inventory.GuiContainer; import net.minecraft.client.renderer.Tessellator; import net.minecraft.entity.player.InventoryPlayer; import net.minecraft.util.ResourceLocation; import net.minecraftforge.fluids.FluidTank; import org.lwjgl.opengl.GL11; import java.util.ArrayList; import java.util.List; public class GuiInfuser extends GuiContainer { TileInfuser tileInfuser; public GuiInfuser(TileInfuser tile, InventoryPlayer inventoryPlayer) { super(new ContainerInfuser(inventoryPlayer, tile)); tileInfuser=tile; } @Override protected void drawGuiContainerForegroundLayer(int mouseX, int mouseY) { GL11.glColor4f(1.0F, 1.0F, 1.0F, 1.0F); this.mc.renderEngine.bindTexture(new ResourceLocation(ConstantsFLORA.GUI_INFUSER_TEX)); this.drawTexturedModalRect(42, 25, 0, ySize, 104, 14); ArrayList<FluidTank> tanks= tileInfuser.getTotalFluidTank(); int total=tileInfuser.getTotalFluidAmount(); int currentX=44; List<String> text=new ArrayList<String>(); int mouseXTranslated=mouseX-guiLeft; int mouseYTranslated=mouseY-guiTop; for(FluidTank tank:tanks){ if(tank.getFluid()!=null){ this.mc.renderEngine.bindTexture(new ResourceLocation(ConstantsFLORA.PREFIX_MOD+"textures/fluid/"+tank.getFluid().getFluid().getName()+".png")); float size=1F*tank.getFluidAmount(); size/=total; size*=100; drawRectangleXRepeated(currentX, 27, 16, 16, 256, 256, (int)size, 10, 16, 1); System.out.println(mouseX); System.out.println(mouseY); if(mouseXTranslated>currentX && mouseXTranslated<(currentX+size) && mouseYTranslated > 27 && mouseYTranslated<38){ text.add(EnumColor.DARK_GREEN+tank.getFluid().getFluid().getLocalizedName()); text.add(EnumColor.DARK_GREEN + "" + tank.getFluidAmount() + "mB" + EnumColor.WHITE); } currentX+=(int)size; } } drawHoveringText(text, mouseXTranslated, mouseYTranslated+30, fontRendererObj); } @Override protected void drawGuiContainerBackgroundLayer(float par1, int par2, int par3) { GL11.glColor4f(1.0F, 1.0F, 1.0F, 1.0F); this.mc.renderEngine.bindTexture(new ResourceLocation(ConstantsFLORA.GUI_INFUSER_TEX)); int x = (width - xSize) / 2; int y = (height - ySize) / 2; this.drawTexturedModalRect(x, y, 0, 0, xSize, ySize); } //This method thanks to Paleocrafter public static void drawRectangleXRepeated(int x, int y, float u, float v, float uMax, float vMax, int width, int height, int tileWidth, int zLevel) { float uvHeight = v - vMax; int numX = (int) Math.ceil((float) width / tileWidth); for (int x2 = 0; x2 < numX; ++x2) { int w = tileWidth; float tileMaxU = uMax; int tileX = w * x2; if (tileWidth > width) { w = width; tileMaxU -= 0.00390625F * (float) w / tileWidth; tileX = w * x2; } else if (x2 == numX - 1) { if (tileWidth > width - x2 * tileWidth) { w = width - x2 * tileWidth; tileMaxU -= 0.00390625F * (float) w / tileWidth; tileX = tileWidth * x2; } } drawRectangleStretched(x + tileX, y, u, v, w, height, tileMaxU, vMax, zLevel); } } public static void drawRectangleStretched(int x, int y, float u, float v, int width, int height, float uMax, float vMax, int zLevel) { float scaleU = 0.00390625F; float scaleV = 0.00390625F; if (u % 1 != 0 || uMax % 1 != 0) scaleU = 1; if (v % 1 != 0 || vMax % 1 != 0) scaleV = 1; Tessellator tessellator = Tessellator.instance; tessellator.startDrawingQuads(); tessellator.addVertexWithUV(x, y + height, zLevel, u * scaleU, vMax * scaleV); tessellator.addVertexWithUV(x + width, y + height, zLevel, uMax * scaleU, vMax * scaleV); tessellator.addVertexWithUV(x + width, 
y, zLevel, uMax * scaleU, v * scaleV); tessellator.addVertexWithUV(x, y, zLevel, u * scaleU, v * scaleV); tessellator.draw(); } }
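The tank bar in drawGuiContainerForegroundLayer gives each fluid a share of a roughly 100 px strip proportional to its amount (size = 1F * amount / total * 100). A self-contained, plain-Java sketch of that math with made-up sample amounts, showing how the per-segment widths and x offsets come out:

// Hypothetical illustration of the proportional widths used above.
public class TankBarWidthSketch {

    public static void main(String[] args) {
        int[] amounts = {3000, 1000, 500}; // mB per tank (sample values)
        int total = 3000 + 1000 + 500;
        int currentX = 44;                 // same left edge as GuiInfuser uses
        for (int amount : amounts) {
            float size = 1F * amount / total * 100; // segment width in pixels
            System.out.println("segment at x=" + currentX + ", width=" + (int) size + " px");
            currentX += (int) size;
        }
    }
}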
package fxlauncher; import com.sun.javafx.application.ParametersImpl; import javax.xml.bind.JAXB; import java.io.IOException; import java.net.URI; import java.nio.file.*; import java.nio.file.attribute.BasicFileAttributes; import java.util.ArrayList; import java.util.List; import java.util.Map; public class CreateManifest { public static void main(String[] args) throws IOException { URI baseURI = URI.create(args[0]); String launchClass = args[1]; Path appPath = Paths.get(args[2]); FXManifest manifest = create(baseURI, launchClass, appPath); if (args.length > 3) { // Parse named parameters List<String> rawParams = new ArrayList<>(); for (int i = 3; i < args.length; i++) rawParams.add(args[i]); ParametersImpl params = new ParametersImpl(rawParams); Map<String, String> named = params.getNamed(); // Configure cacheDir if (named != null && named.containsKey("cache-dir")) manifest.cacheDir = named.get("cache-dir"); // Add the raw parameter string to the manifest manifest.parameters = args[3]; } JAXB.marshal(manifest, appPath.resolve("app.xml").toFile()); } public static FXManifest create(URI baseURI, String launchClass, Path appPath) throws IOException { FXManifest manifest = new FXManifest(); manifest.uri = baseURI; manifest.launchClass = launchClass; Files.walkFileTree(appPath, new SimpleFileVisitor<Path>() { public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { if (!Files.isDirectory(file) && file.toString().endsWith(".jar") && !file.getFileName().toString().startsWith("fxlauncher")) manifest.files.add(new LibraryFile(appPath, file)); return FileVisitResult.CONTINUE; } }); return manifest; } }
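A hypothetical invocation of CreateManifest.main(); the URI, launch class and directory below are placeholders. Note that only the first extra argument is stored in manifest.parameters, so a single --key=value string is what the parsing above expects.

package fxlauncher;

import java.io.IOException;

// Hypothetical example, not part of the project.
public class CreateManifestExample {

    public static void main(String[] args) throws IOException {
        CreateManifest.main(new String[] {
                "http://example.com/myapp/", // base URI the launcher downloads artifacts from
                "com.example.MainApp",       // application launch class
                "build/app",                 // directory scanned for non-fxlauncher jars
                "--cache-dir=.myapp-cache"   // optional named parameter picked up above
        });
    }
}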
package graphql.schema; import graphql.Internal; import graphql.introspection.Introspection; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; @Internal public class SchemaUtil { private static final TypeTraverser TRAVERSER = new TypeTraverser(); Map<String, GraphQLType> allTypes(final GraphQLSchema schema, final Set<GraphQLType> additionalTypes) { List<GraphQLType> roots = new ArrayList<>(); roots.add(schema.getQueryType()); if (schema.isSupportingMutations()) { roots.add(schema.getMutationType()); } if (schema.isSupportingSubscriptions()) { roots.add(schema.getSubscriptionType()); } if (additionalTypes != null) { roots.addAll(additionalTypes); } if (schema.getDirectives() != null) { roots.addAll(schema.getDirectives()); } roots.add(Introspection.__Schema); GraphQLTypeCollectingVisitor visitor = new GraphQLTypeCollectingVisitor(); TRAVERSER.depthFirst(visitor, roots); return visitor.getResult(); } /* * Indexes GraphQLObject types registered with the provided schema by implemented GraphQLInterface name * * This helps in accelerates/simplifies collecting types that implement a certain interface * * Provided to replace {@link #findImplementations(graphql.schema.GraphQLSchema, graphql.schema.GraphQLInterfaceType)} * */ Map<String, List<GraphQLObjectType>> groupImplementations(GraphQLSchema schema) { Map<String, List<GraphQLObjectType>> result = new HashMap<>(); for (GraphQLType type : schema.getAllTypesAsList()) { if (type instanceof GraphQLObjectType) { for (GraphQLOutputType interfaceType : ((GraphQLObjectType) type).getInterfaces()) { List<GraphQLObjectType> myGroup = result.computeIfAbsent(interfaceType.getName(), k -> new ArrayList<>()); myGroup.add((GraphQLObjectType) type); } } } return result; } /** * This method is deprecated due to a performance concern. * * The Algorithm complexity: O(n^2), where n is number of registered GraphQLTypes * * That indexing operation is performed twice per input document: * 1. during validation * 2. during execution * * We now indexed all types at the schema creation, which has brought complexity down to O(1) * * @param schema GraphQL schema * @param interfaceType an interface type to find implementations for * * @return List of object types implementing provided interface * * @deprecated use {@link graphql.schema.GraphQLSchema#getImplementations(GraphQLInterfaceType)} instead */ @Deprecated public List<GraphQLObjectType> findImplementations(GraphQLSchema schema, GraphQLInterfaceType interfaceType) { List<GraphQLObjectType> result = new ArrayList<>(); for (GraphQLType type : schema.getAllTypesAsList()) { if (!(type instanceof GraphQLObjectType)) { continue; } GraphQLObjectType objectType = (GraphQLObjectType) type; if ((objectType).getInterfaces().contains(interfaceType)) { result.add(objectType); } } return result; } void replaceTypeReferences(GraphQLSchema schema) { final Map<String, GraphQLType> typeMap = schema.getTypeMap(); List<GraphQLType> roots = new ArrayList<>(typeMap.values()); roots.addAll(schema.getDirectives()); TRAVERSER.depthFirst(new GraphQLTypeResolvingVisitor(typeMap), roots); } }
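The deprecation note on findImplementations points callers at the index built when the schema is created. A small sketch of that replacement call, assuming the schema and interface type already exist:

package graphql.schema;

import java.util.List;

// Hypothetical example, not part of the library.
public final class FindImplementationsExample {

    private FindImplementationsExample() {}

    public static List<GraphQLObjectType> implementationsOf(GraphQLSchema schema,
                                                            GraphQLInterfaceType interfaceType) {
        // O(1) lookup against the index built at schema creation time,
        // instead of the O(n^2) scan the deprecated method performs.
        return schema.getImplementations(interfaceType);
    }
}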
package hackerrank; import java.util.HashMap; import java.util.Map; import java.util.Scanner; public class BeautifulPairs { private static int beautifulPairs(int[] a, int[] b) { Map<Integer, Integer> countMap = new HashMap<>(); for (int i = 0; i < a.length; i++) { if (!countMap.containsKey(a[i])) { countMap.put(a[i], 1); } else { countMap.put(a[i], countMap.get(a[i]) + 1); } } int result = 0; for (int i = 0; i < b.length; i++) { if (countMap.containsKey(b[i])) { int count = countMap.get(b[i]); int newCount = count - 1; if (newCount == 0) { countMap.remove(b[i]); } else { countMap.put(b[i], newCount); } result++; } } if (countMap.size() == 0) { /* every element of a is already paired, so the mandatory change to b breaks one pair */ result--; } else { /* the mandatory change to b can create one extra pair with a leftover element of a */ result++; } return result; } public static void main(String[] args) { Scanner in = new Scanner(System.in); int n = in.nextInt(); int[] a = new int[n]; for (int i = 0; i < n; i++) { a[i] = in.nextInt(); } int[] b = new int[n]; for (int i = 0; i < n; i++) { b[i] = in.nextInt(); } System.out.println(beautifulPairs(a, b)); } }
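A quick check of beautifulPairs against the classic sample, via a hypothetical harness that is not part of the original file: A = {1, 2, 3, 4}, B = {1, 2, 3, 3}. Three values already match and one element of B can be changed to pair with the leftover 4, so the expected output is 4.

package hackerrank;

import java.io.ByteArrayInputStream;
import java.nio.charset.StandardCharsets;

// Hypothetical harness feeding the sample through main().
public class BeautifulPairsExample {

    public static void main(String[] args) {
        String sample = "4\n1 2 3 4\n1 2 3 3\n";
        System.setIn(new ByteArrayInputStream(sample.getBytes(StandardCharsets.UTF_8)));
        BeautifulPairs.main(new String[0]); // prints 4
    }
}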
package hudson.plugins.git; import hudson.*; import hudson.FilePath.FileCallable; import hudson.matrix.MatrixBuild; import hudson.matrix.MatrixRun; import hudson.model.*; import static hudson.Util.fixEmptyAndTrim; import hudson.plugins.git.browser.GitRepositoryBrowser; import hudson.plugins.git.browser.GitWeb; import hudson.plugins.git.browser.GithubWeb; import hudson.plugins.git.browser.RedmineWeb; import hudson.plugins.git.opt.PreBuildMergeOptions; import hudson.plugins.git.util.*; import hudson.plugins.git.util.Build; import hudson.remoting.VirtualChannel; import hudson.scm.ChangeLogParser; import hudson.scm.RepositoryBrowser; import hudson.scm.RepositoryBrowsers; import hudson.scm.SCM; import hudson.scm.SCMDescriptor; import hudson.util.FormValidation; import java.io.BufferedReader; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.FileOutputStream; import java.io.FileReader; import java.io.IOException; import java.io.Serializable; import java.lang.reflect.Constructor; import java.lang.reflect.InvocationTargetException; import java.net.MalformedURLException; import java.net.URL; import java.util.*; import java.util.Map.Entry; import java.util.concurrent.Future; import java.util.logging.Level; import java.util.logging.Logger; import java.util.regex.Pattern; import javax.servlet.ServletException; import net.sf.json.JSONException; import net.sf.json.JSONObject; import org.kohsuke.stapler.DataBoundConstructor; import org.kohsuke.stapler.StaplerRequest; import org.kohsuke.stapler.StaplerResponse; import org.spearce.jgit.lib.ObjectId; import org.spearce.jgit.lib.RepositoryConfig; import org.spearce.jgit.transport.RefSpec; import org.spearce.jgit.transport.RemoteConfig; /** * Git SCM. * * @author Nigel Magnay */ public class GitSCM extends SCM implements Serializable { // old fields are left so that old config data can be read in, but // they are deprecated. transient so that they won't show up in XML // when writing back @Deprecated transient String source; @Deprecated transient String branch; /** * Store a config version so we're able to migrate config on various * functionality upgrades. */ private Long configVersion; /** * All the remote repositories that we know about. */ private List<RemoteConfig> remoteRepositories; /** * All the branches that we wish to care about building. */ private List<BranchSpec> branches; /** * Optional local branch to work on. */ private String localBranch; /** * Options for merging before a build. */ private PreBuildMergeOptions mergeOptions; /** * Use --recursive flag on submodule commands - requires git>=1.6.5 */ private boolean recursiveSubmodules; private boolean doGenerateSubmoduleConfigurations; private boolean authorOrCommitter; private boolean clean; private boolean wipeOutWorkspace; private boolean pruneBranches; /** * @deprecated * Replaced by {@link #buildChooser} instead. */ private transient String choosingStrategy; private BuildChooser buildChooser; public String gitTool = null; private GitRepositoryBrowser browser; private Collection<SubmoduleConfig> submoduleCfg; public static final String GIT_BRANCH = "GIT_BRANCH"; public static final String GIT_COMMIT = "GIT_COMMIT"; private String relativeTargetDir; private String excludedRegions; private String excludedUsers; public Collection<SubmoduleConfig> getSubmoduleCfg() { return submoduleCfg; } public void setSubmoduleCfg(Collection<SubmoduleConfig> submoduleCfg) { this.submoduleCfg = submoduleCfg; } /** * A convenience constructor that sets everything to default. 
* * @param repositoryUrl * Repository URL to clone from. */ public GitSCM(String repositoryUrl) throws IOException { this( DescriptorImpl.createRepositoryConfigurations(new String[]{repositoryUrl},new String[]{null},new String[]{null}), Collections.singletonList(new BranchSpec("")), new PreBuildMergeOptions(), false, Collections.<SubmoduleConfig>emptyList(), false, false, new DefaultBuildChooser(), null, null, false, null, null, null, null, false, false); } @DataBoundConstructor public GitSCM( List<RemoteConfig> repositories, List<BranchSpec> branches, PreBuildMergeOptions mergeOptions, boolean doGenerateSubmoduleConfigurations, Collection<SubmoduleConfig> submoduleCfg, boolean clean, boolean wipeOutWorkspace, BuildChooser buildChooser, GitRepositoryBrowser browser, String gitTool, boolean authorOrCommitter, String relativeTargetDir, String excludedRegions, String excludedUsers, String localBranch, boolean recursiveSubmodules, boolean pruneBranches) { // normalization this.branches = branches; this.localBranch = localBranch; this.remoteRepositories = repositories; this.browser = browser; this.mergeOptions = mergeOptions; this.doGenerateSubmoduleConfigurations = doGenerateSubmoduleConfigurations; this.submoduleCfg = submoduleCfg; this.clean = clean; this.wipeOutWorkspace = wipeOutWorkspace; this.configVersion = 1L; this.gitTool = gitTool; this.authorOrCommitter = authorOrCommitter; this.buildChooser = buildChooser; this.relativeTargetDir = relativeTargetDir; this.excludedRegions = excludedRegions; this.excludedUsers = excludedUsers; this.recursiveSubmodules = recursiveSubmodules; this.pruneBranches = pruneBranches; buildChooser.gitSCM = this; // set the owner } public Object readResolve() { // Migrate data // Default unspecified to v0 if(configVersion == null) configVersion = 0L; if(source!=null) { remoteRepositories = new ArrayList<RemoteConfig>(); branches = new ArrayList<BranchSpec>(); doGenerateSubmoduleConfigurations = false; mergeOptions = new PreBuildMergeOptions(); recursiveSubmodules = false; remoteRepositories.add(newRemoteConfig("origin", source, new RefSpec("+refs/heads/*:refs/remotes/origin/*"))); if(branch != null) { branches.add(new BranchSpec(branch)); } else { branches.add(new BranchSpec("*/master")); } } if(configVersion < 1 && branches != null) { // Migrate the branch specs from // single * wildcard, to ** wildcard. for(BranchSpec branchSpec : branches) { String name = branchSpec.getName(); name = name.replace("*", "**"); branchSpec.setName(name); } } if(mergeOptions.doMerge() && mergeOptions.getMergeRemote() == null) { mergeOptions.setMergeRemote(remoteRepositories.get(0)); } if (choosingStrategy!=null && buildChooser==null) { for (BuildChooserDescriptor d : BuildChooser.all()) { if (choosingStrategy.equals(d.getLegacyId())) try { buildChooser = d.clazz.newInstance(); } catch (InstantiationException e) { LOGGER.log(Level.WARNING, "Failed to instantiate the build chooser",e); } catch (IllegalAccessException e) { LOGGER.log(Level.WARNING, "Failed to instantiate the build chooser",e); } } } if (buildChooser==null) buildChooser = new DefaultBuildChooser(); buildChooser.gitSCM = this; return this; } public String getExcludedRegions() { return excludedRegions; } public String[] getExcludedRegionsNormalized() { return (excludedRegions == null || excludedRegions.trim().equals("")) ? 
null : excludedRegions.split("[\\r\\n]+"); } private Pattern[] getExcludedRegionsPatterns() { String[] excluded = getExcludedRegionsNormalized(); if (excluded != null) { Pattern[] patterns = new Pattern[excluded.length]; int i = 0; for (String excludedRegion : excluded) { patterns[i++] = Pattern.compile(excludedRegion); } return patterns; } return new Pattern[0]; } public String getExcludedUsers() { return excludedUsers; } public Set<String> getExcludedUsersNormalized() { String s = fixEmptyAndTrim(excludedUsers); if (s==null) return Collections.emptySet(); Set<String> users = new HashSet<String>(); for (String user : s.split("[\\r\\n]+")) users.add(user.trim()); return users; } @Override public GitRepositoryBrowser getBrowser() { return browser; } public boolean getPruneBranches() { return this.pruneBranches; } public boolean getWipeOutWorkspace() { return this.wipeOutWorkspace; } public boolean getClean() { return this.clean; } public BuildChooser getBuildChooser() { return buildChooser; } public List<RemoteConfig> getParamExpandedRepos(AbstractBuild<?,?> build) { if (remoteRepositories == null) return new ArrayList<RemoteConfig>(); else { List<RemoteConfig> expandedRepos = new ArrayList<RemoteConfig>(); for (RemoteConfig oldRepo : remoteRepositories) { expandedRepos.add(newRemoteConfig(oldRepo.getName(), oldRepo.getURIs().get(0).toString(), new RefSpec(getRefSpec(oldRepo, build)))); } return expandedRepos; } } public RemoteConfig getRepositoryByName(String repoName) { for (RemoteConfig r : getRepositories()) { if (r.getName().equals(repoName)) { return r; } } return null; } public List<RemoteConfig> getRepositories() { // Handle null-value to ensure backwards-compatibility, ie project configuration missing the <repositories/> XML element if (remoteRepositories == null) return new ArrayList<RemoteConfig>(); return remoteRepositories; } public String getGitTool() { return gitTool; } private String getRefSpec(RemoteConfig repo, AbstractBuild<?,?> build) { String refSpec = repo.getFetchRefSpecs().get(0).toString(); ParametersAction parameters = build.getAction(ParametersAction.class); if (parameters != null) refSpec = parameters.substitute(build, refSpec); return refSpec; } private String getSingleBranch(AbstractBuild<?, ?> build) { // if we have multiple branches skip to advanced usecase if (getBranches().size() != 1 || getRepositories().size() != 1) return null; String branch = getBranches().get(0).getName(); String repository = getRepositories().get(0).getName(); // replace repository wildcard with repository name if (branch.startsWith("*/")) branch = repository + branch.substring(1); // if the branch name contains more wildcards then the simple usecase // does not apply and we need to skip to the advanced usecase if (branch.contains("*")) return null; // substitute build parameters if available ParametersAction parameters = build.getAction(ParametersAction.class); if (parameters != null) branch = parameters.substitute(build, branch); return branch; } @Override public boolean pollChanges(final AbstractProject project, Launcher launcher, final FilePath workspace, final TaskListener listener) throws IOException, InterruptedException { // Poll for changes. Are there any unbuilt revisions that Hudson ought to build ? 
listener.getLogger().println("Using strategy: " + buildChooser.getDisplayName()); final AbstractBuild lastBuild = (AbstractBuild)project.getLastBuild(); if(lastBuild != null) { listener.getLogger().println("[poll] Last Build : #" + lastBuild.getNumber()); } else { // If we've never been built before, well, gotta build! listener.getLogger().println("[poll] No previous build, so forcing an initial build."); return true; } final BuildData buildData = fixNull(getBuildData(lastBuild, false)); if(buildData != null && buildData.lastBuild != null) { listener.getLogger().println("[poll] Last Built Revision: " + buildData.lastBuild.revision); } final String singleBranch = getSingleBranch(lastBuild); Label label = project.getAssignedLabel(); final String gitExe; final List<RemoteConfig> paramRepos = getParamExpandedRepos(lastBuild); //If this project is tied onto a node, it's built always there. On other cases, //polling is done on the node which did the last build. if (label != null && label.isSelfLabel()) { if(label.getNodes().iterator().next() != project.getLastBuiltOn()) { listener.getLogger().println("Last build was not on tied node, forcing rebuild."); return true; } gitExe = getGitExe(label.getNodes().iterator().next(), listener); } else { gitExe = getGitExe(project.getLastBuiltOn(), listener); } FilePath workingDirectory = workingDirectory(workspace); // Rebuild if the working directory doesn't exist // I'm actually not 100% sure about this, but I'll leave it in for now. // Update 9/9/2010 - actually, I think this *was* needed, since we weren't doing a better check // for whether we'd ever been built before. But I'm fixing that right now anyway. if (!workingDirectory.exists()) { return true; } final EnvVars environment = GitUtils.getPollEnvironment(project, workspace, launcher, listener); boolean pollChangesResult = workingDirectory.act(new FileCallable<Boolean>() { private static final long serialVersionUID = 1L; public Boolean invoke(File localWorkspace, VirtualChannel channel) throws IOException { IGitAPI git = new GitAPI(gitExe, new FilePath(localWorkspace), listener, environment); if (git.hasGitRepo()) { // Repo is there - do a fetch listener.getLogger().println("Fetching changes from the remote Git repositories"); // Fetch updates for (RemoteConfig remoteRepository : paramRepos) { fetchFrom(git, localWorkspace, listener, remoteRepository); } listener.getLogger().println("Polling for changes in"); Collection<Revision> origCandidates = buildChooser.getCandidateRevisions( true, singleBranch, git, listener, buildData); List<Revision> candidates = new ArrayList<Revision>(); for (Revision c : origCandidates) { if (!isRevExcluded(git, c, listener)) { candidates.add(c); } } return (candidates.size() > 0); } else { listener.getLogger().println("No Git repository yet, an initial checkout is required"); return true; } } }); return pollChangesResult; } private BuildData fixNull(BuildData bd) { return bd!=null ? 
bd : new BuildData() /*dummy*/; } private void cleanSubmodules(IGitAPI parentGit, File workspace, TaskListener listener, RemoteConfig remoteRepository) { List<IndexEntry> submodules = new GitUtils(listener, parentGit) .getSubmodules("HEAD"); for (IndexEntry submodule : submodules) { try { RemoteConfig submoduleRemoteRepository = getSubmoduleRepository(workspace, remoteRepository, submodule.getFile()); File subdir = new File(workspace, submodule.getFile()); listener.getLogger().println("Trying to clean submodule in " + subdir); IGitAPI subGit = new GitAPI(parentGit.getGitExe(), new FilePath(subdir), listener, parentGit.getEnvironment()); subGit.clean(); } catch (Exception ex) { listener .getLogger() .println( "Problem cleaning submodule in " + submodule.getFile() + " - could be unavailable. Continuing anyway"); } } } /** * Fetch information from a particular remote repository. Attempt to fetch * from submodules, if they exist in the local WC * * @param git * @param listener * @param remoteRepository * @return true if fetch goes through, false otherwise. * @throws */ private boolean fetchFrom(IGitAPI git, File workspace, TaskListener listener, RemoteConfig remoteRepository) { boolean fetched = true; try { git.fetch(remoteRepository); List<IndexEntry> submodules = new GitUtils(listener, git) .getSubmodules("HEAD"); for (IndexEntry submodule : submodules) { try { RemoteConfig submoduleRemoteRepository = getSubmoduleRepository(workspace, remoteRepository, submodule.getFile()); File subdir = new File(workspace, submodule.getFile()); listener.getLogger().println("Trying to fetch " + submodule.getFile() + " into " + subdir); IGitAPI subGit = new GitAPI(git.getGitExe(), new FilePath(subdir), listener, git.getEnvironment()); subGit.fetch(submoduleRemoteRepository); } catch (Exception ex) { listener .getLogger() .println( "Problem fetching from submodule " + submodule.getFile() + " - could be unavailable. Continuing anyway"); } } } catch (GitException ex) { listener.error( "Problem fetching from " + remoteRepository.getName() + " / " + remoteRepository.getName() + " - could be unavailable. Continuing anyway"); fetched = false; } return fetched; } public RemoteConfig getSubmoduleRepository(File aWorkspace, RemoteConfig orig, String name) throws IOException { // Read submodule from .gitmodules BufferedReader bfr = new BufferedReader(new FileReader(aWorkspace + File.separator + ".gitmodules")); String line = ""; boolean isSubmodule = false; try { while((line= bfr.readLine()) != null) { line = line.trim(); if(line.startsWith("[submodule \"" + name )) { isSubmodule = true; } else if (isSubmodule && line.startsWith("url")) { int index = line.indexOf("="); String refUrl = line.substring(index + 1).trim(); return newRemoteConfig(name, refUrl, orig.getFetchRefSpecs().get(0)); } } } catch (IOException e) { throw new GitException("Error in reading .gitmodules", e); } finally { bfr.close(); } // Attempt to guess the submodule URL?? 
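        // Illustrative example of the fallback below: parent remote "git://host/project/.git" plus
        // submodule path "modules/lib" is rewritten to "git://host/project/modules/lib/.git".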
String refUrl = orig.getURIs().get(0).toString(); if (refUrl.endsWith("/.git")) { refUrl = refUrl.substring(0, refUrl.length() - 4); } if (!refUrl.endsWith("/")) refUrl += "/"; refUrl += name; if (!refUrl.endsWith("/")) refUrl += "/"; refUrl += ".git"; return newRemoteConfig(name, refUrl, orig.getFetchRefSpecs().get(0)); } private RemoteConfig newRemoteConfig(String name, String refUrl, RefSpec refSpec) { File temp = null; try { temp = File.createTempFile("tmp", "config"); RepositoryConfig repoConfig = new RepositoryConfig(null, temp); // Make up a repo config from the request parameters repoConfig.setString("remote", name, "url", refUrl); repoConfig.setString("remote", name, "fetch", refSpec.toString()); repoConfig.save(); return RemoteConfig.getAllRemoteConfigs(repoConfig).get(0); } catch(Exception ex) { throw new GitException("Error creating temp file"); } finally { if(temp != null) temp.delete(); } } private boolean changeLogResult(String changeLog, File changelogFile) throws IOException { if (changeLog == null) return false; else { changelogFile.delete(); FileOutputStream fos = new FileOutputStream(changelogFile); fos.write(changeLog.getBytes()); fos.close(); // Write to file return true; } } /** * Exposing so that we can get this from GitPublisher. */ public String getGitExe(Node builtOn, TaskListener listener) { GitTool[] gitToolInstallations = Hudson.getInstance().getDescriptorByType(GitTool.DescriptorImpl.class).getInstallations(); for(GitTool t : gitToolInstallations) { //If gitTool is null, use first one. if(gitTool == null) { gitTool = t.getName(); } if(t.getName().equals(gitTool)) { if(builtOn != null){ try { String s = t.forNode(builtOn, listener).getGitExe(); return s; } catch (IOException e) { listener.getLogger().println("Failed to get git executable"); } catch (InterruptedException e) { listener.getLogger().println("Failed to get git executable"); } } } } return null; } /** * If true, use the commit author as the changeset author, rather * than the committer. 
*/ public boolean getAuthorOrCommitter() { return authorOrCommitter; } @Override public boolean checkout(final AbstractBuild build, Launcher launcher, final FilePath workspace, final BuildListener listener, File changelogFile) throws IOException, InterruptedException { Object[] returnData; // Changelog, BuildData listener.getLogger().println("Checkout:" + workspace.getName() + " / " + workspace.getRemote() + " - " + workspace.getChannel()); listener.getLogger().println("Using strategy: " + buildChooser.getDisplayName()); final FilePath workingDirectory = workingDirectory(workspace); if (!workingDirectory.exists()) { workingDirectory.mkdirs(); } final String projectName = build.getProject().getName(); final int buildNumber = build.getNumber(); final String gitExe = getGitExe(build.getBuiltOn(), listener); final String buildnumber = "hudson-" + projectName + "-" + buildNumber; final BuildData buildData = getBuildData(build.getPreviousBuild(), true); if(buildData != null && buildData.lastBuild != null) { listener.getLogger().println("Last Built Revision: " + buildData.lastBuild.revision); } final EnvVars environment = build.getEnvironment(listener); final String singleBranch = getSingleBranch(build); Revision tempParentLastBuiltRev = null; if (build instanceof MatrixRun) { MatrixBuild parentBuild = ((MatrixRun)build).getParentBuild(); if (parentBuild != null) { BuildData parentBuildData = parentBuild.getAction(BuildData.class); if (parentBuildData != null) { tempParentLastBuiltRev = parentBuildData.getLastBuiltRevision(); } } } final List<RemoteConfig> paramRepos = getParamExpandedRepos(build); final Revision parentLastBuiltRev = tempParentLastBuiltRev; final Revision revToBuild = workingDirectory.act(new FileCallable<Revision>() { private static final long serialVersionUID = 1L; public Revision invoke(File localWorkspace, VirtualChannel channel) throws IOException { FilePath ws = new FilePath(localWorkspace); listener.getLogger().println("Checkout:" + ws.getName() + " / " + ws.getRemote() + " - " + ws.getChannel()); IGitAPI git = new GitAPI(gitExe, ws, listener, environment); if (wipeOutWorkspace) { listener.getLogger().println("Wiping out workspace first"); try { ws.deleteContents(); } catch (InterruptedException e) { // I don't really care if this fails. } } if (git.hasGitRepo()) { // It's an update // Do we want to prune first? if (pruneBranches) { listener.getLogger().println("Pruning obsolete local branches"); for (RemoteConfig remoteRepository : paramRepos) { git.prune(remoteRepository); } } listener.getLogger().println("Fetching changes from the remote Git repository"); boolean fetched = false; for (RemoteConfig remoteRepository : paramRepos) { if (fetchFrom(git,localWorkspace,listener,remoteRepository)) { fetched = true; } } if (!fetched) { listener.error("Could not fetch from any repository"); throw new GitException("Could not fetch from any repository"); } } else { listener.getLogger().println("Cloning the remote Git repository"); // Go through the repositories, trying to clone from one boolean successfullyCloned = false; for(RemoteConfig rc : paramRepos) { try { git.clone(rc); successfullyCloned = true; break; } catch(GitException ex) { listener.error("Error cloning remote repo '%s' : %s", rc.getName(), ex.getMessage()); if(ex.getCause() != null) { listener.error("Cause: %s", ex.getCause().getMessage()); } // Failed. 
Try the next one listener.getLogger().println("Trying next repository"); } } if(!successfullyCloned) { listener.error("Could not clone repository"); throw new GitException("Could not clone"); } boolean fetched = false; // Also do a fetch for (RemoteConfig remoteRepository : paramRepos) { try { git.fetch(remoteRepository); fetched = true; } catch (Exception e) { listener.error( "Problem fetching from " + remoteRepository.getName() + " / " + remoteRepository.getName() + " - could be unavailable. Continuing anyway"); } } if (!fetched) { listener.error("Could not fetch from any repository"); throw new GitException("Could not fetch from any repository"); } if (git.hasGitModules()) { git.submoduleInit(); git.submoduleUpdate(recursiveSubmodules); } } if (parentLastBuiltRev != null) return parentLastBuiltRev; Collection<Revision> candidates = buildChooser.getCandidateRevisions( false, singleBranch, git, listener, buildData); if(candidates.size() == 0) return null; return candidates.iterator().next(); } }); if(revToBuild == null) { // getBuildCandidates should make the last item the last build, so a re-build // will build the last built thing. listener.error("Nothing to do"); return false; } listener.getLogger().println("Commencing build of " + revToBuild); environment.put(GIT_COMMIT, revToBuild.getSha1String()); if (mergeOptions.doMerge()) { if (!revToBuild.containsBranchName(mergeOptions.getRemoteBranchName())) { returnData = workingDirectory.act(new FileCallable<Object[]>() { private static final long serialVersionUID = 1L; public Object[] invoke(File localWorkspace, VirtualChannel channel) throws IOException { IGitAPI git = new GitAPI(gitExe, new FilePath(localWorkspace), listener, environment); // Do we need to merge this revision onto MergeTarget // Only merge if there's a branch to merge that isn't listener.getLogger().println( "Merging " + revToBuild + " onto " + mergeOptions.getMergeTarget()); // checkout origin/blah ObjectId target = git.revParse(mergeOptions.getRemoteBranchName()); if (getLocalBranch() == null) { git.checkout(target.name()); } else { git.checkoutBranch(localBranch, target.name()); } try { git.merge(revToBuild.getSha1().name()); } catch (Exception ex) { listener .getLogger() .println( "Branch not suitable for integration as it does not merge cleanly"); // We still need to tag something to prevent // repetitive builds from happening - tag the // candidate // branch. 
if (getLocalBranch()==null) { git.checkout(revToBuild.getSha1().name()); } else { git.checkoutBranch(localBranch, revToBuild.getSha1().name()); } git.tag(buildnumber, "Hudson Build #" + buildNumber); buildData.saveBuild(new Build(revToBuild, buildNumber, Result.FAILURE)); if (getClean()) { listener.getLogger().println("Cleaning workspace"); git.clean(); if (git.hasGitModules()) { git.submoduleClean(recursiveSubmodules); } } return new Object[]{null, buildData}; } if (git.hasGitModules()) { git.submoduleUpdate(recursiveSubmodules); } // Tag the successful merge git.tag(buildnumber, "Hudson Build #" + buildNumber); String changeLog = computeChangeLog(git, revToBuild, listener, buildData); Build build = new Build(revToBuild, buildNumber, null); buildData.saveBuild(build); GitUtils gu = new GitUtils(listener,git); build.mergeRevision = gu.getRevisionForSHA1(target); if (getClean()) { listener.getLogger().println("Cleaning workspace"); git.clean(); if (git.hasGitModules()) { git.submoduleClean(recursiveSubmodules); } } // Fetch the diffs into the changelog file return new Object[]{changeLog, buildData}; } }); BuildData returningBuildData = (BuildData)returnData[1]; build.addAction(returningBuildData); return changeLogResult((String) returnData[0], changelogFile); } } // No merge returnData = workingDirectory.act(new FileCallable<Object[]>() { private static final long serialVersionUID = 1L; public Object[] invoke(File localWorkspace, VirtualChannel channel) throws IOException { IGitAPI git = new GitAPI(gitExe, new FilePath(localWorkspace), listener, environment); // Straight compile-the-branch listener.getLogger().println("Checking out " + revToBuild); if (getLocalBranch()==null) { git.checkout(revToBuild.getSha1().name()); } else { git.checkoutBranch(localBranch, revToBuild.getSha1().name()); } // if(compileSubmoduleCompares) if (doGenerateSubmoduleConfigurations) { SubmoduleCombinator combinator = new SubmoduleCombinator( git, listener, localWorkspace, submoduleCfg); combinator.createSubmoduleCombinations(); } if (git.hasGitModules()) { git.submoduleInit(); git.submoduleSync(); // Git submodule update will only 'fetch' from where it // regards as 'origin'. However, // it is possible that we are building from a // RemoteRepository with changes // that are not in 'origin' AND it may be a new module that // we've only just discovered. // So - try updating from all RRs, then use the submodule // Update to do the checkout // Also, only do this if we're not doing recursive submodules, since that'll // theoretically be dealt with there anyway. if (!recursiveSubmodules) { for (RemoteConfig remoteRepository : paramRepos) { fetchFrom(git, localWorkspace, listener, remoteRepository); } } // Update to the correct checkout git.submoduleUpdate(recursiveSubmodules); } // Tag the build git.tag(buildnumber, "Hudson Build #" + buildNumber); String changeLog = computeChangeLog(git, revToBuild, listener, buildData); buildData.saveBuild(new Build(revToBuild, buildNumber, null)); if (getClean()) { listener.getLogger().println("Cleaning workspace"); git.clean(); } // Fetch the diffs into the changelog file return new Object[]{changeLog, buildData}; } }); build.addAction((Action) returnData[1]); return changeLogResult((String) returnData[0], changelogFile); } /** * Build up change log from all the branches that we've merged into {@code revToBuild} * * @param git * Used for invoking Git * @param revToBuild * Points to the revision we'll be building. This includes all the branches we've merged.
* @param listener * Used for writing to build console * @param buildData * Information that captures what we did during the last build. We need this for changelog, * or else we won't know where to stop. */ private String computeChangeLog(IGitAPI git, Revision revToBuild, BuildListener listener, BuildData buildData) throws IOException { int histories = 0; StringBuilder changeLog = new StringBuilder(); try { for(Branch b : revToBuild.getBranches()) { Build lastRevWas = buildChooser.prevBuildForChangelog(b.getName(), buildData, git); if(lastRevWas != null) { changeLog.append(putChangelogDiffsIntoFile(git, b.name, lastRevWas.getSHA1().name(), revToBuild.getSha1().name())); histories++; } else { listener.getLogger().println("No change to record in branch " + b.getName()); } } } catch (GitException ge) { changeLog.append("Unable to retrieve changeset"); } if(histories > 1) listener.getLogger().println("Warning : There are multiple branch changesets here"); return changeLog.toString(); } public void buildEnvVars(AbstractBuild build, java.util.Map<String, String> env) { super.buildEnvVars(build, env); String branch = getSingleBranch(build); if(branch != null){ env.put(GIT_BRANCH, branch); } } private String putChangelogDiffsIntoFile(IGitAPI git, String branchName, String revFrom, String revTo) throws IOException { ByteArrayOutputStream fos = new ByteArrayOutputStream(); // fos.write("<data><![CDATA[".getBytes()); String changeset = "Changes in branch " + branchName + ", between " + revFrom + " and " + revTo + "\n"; fos.write(changeset.getBytes()); git.changelog(revFrom, revTo, fos); // fos.write("]]></data>".getBytes()); fos.close(); return fos.toString("UTF-8"); } @Override public ChangeLogParser createChangeLogParser() { return new GitChangeLogParser(getAuthorOrCommitter()); } @Extension public static final class DescriptorImpl extends SCMDescriptor<GitSCM> { private String gitExe; public DescriptorImpl() { super(GitSCM.class, GitRepositoryBrowser.class); load(); } public String getDisplayName() { return "Git"; } public List<BuildChooserDescriptor> getBuildChooserDescriptors() { return BuildChooser.all(); } /** * Lists available toolinstallations. * @return list of available git tools */ public List<GitTool> getGitTools() { GitTool[] gitToolInstallations = Hudson.getInstance().getDescriptorByType(GitTool.DescriptorImpl.class).getInstallations(); return Arrays.asList(gitToolInstallations); } /** * Path to git executable. * @deprecated * @see GitTool */ @Deprecated public String getGitExe() { return gitExe; } /** * Old configuration of git executable - exposed so that we can * migrate this setting to GitTool without deprecation warnings. 
*/ public String getOldGitExe() { return gitExe; } public SCM newInstance(StaplerRequest req, JSONObject formData) throws FormException { List<RemoteConfig> remoteRepositories; try { remoteRepositories = createRepositoryConfigurations(req.getParameterValues("git.repo.url"), req.getParameterValues("git.repo.name"), req.getParameterValues("git.repo.refspec")); } catch (IOException e1) { throw new GitException("Error creating repositories", e1); } List<BranchSpec> branches = createBranches(req.getParameterValues("git.branch")); // Make up a repo config from the request parameters PreBuildMergeOptions mergeOptions = createMergeOptions(req.getParameter("git.doMerge"), req.getParameter("git.mergeRemote"), req.getParameter("git.mergeTarget"), remoteRepositories); String[] urls = req.getParameterValues("git.repo.url"); String[] names = req.getParameterValues("git.repo.name"); Collection<SubmoduleConfig> submoduleCfg = new ArrayList<SubmoduleConfig>(); final GitRepositoryBrowser gitBrowser = getBrowserFromRequest(req, formData); String gitTool = req.getParameter("git.gitTool"); return new GitSCM( remoteRepositories, branches, mergeOptions, req.getParameter("git.generate") != null, submoduleCfg, req.getParameter("git.clean") != null, req.getParameter("git.wipeOutWorkspace") != null, req.bindJSON(BuildChooser.class,formData.getJSONObject("buildChooser")), gitBrowser, gitTool, req.getParameter("git.authorOrCommitter") != null, req.getParameter("git.relativeTargetDir"), req.getParameter("git.excludedRegions"), req.getParameter("git.excludedUsers"), req.getParameter("git.localBranch"), req.getParameter("git.recursiveSubmodules") != null, req.getParameter("git.pruneBranches") != null); } /** * Determine the browser from the scmData contained in the {@link StaplerRequest}. 
* * @param scmData * @return */ private GitRepositoryBrowser getBrowserFromRequest(final StaplerRequest req, final JSONObject scmData) { if (scmData.containsKey("browser")) { return req.bindJSON(GitRepositoryBrowser.class, scmData.getJSONObject("browser")); } else { return null; } } public static List<RemoteConfig> createRepositoryConfigurations(String[] pUrls, String[] repoNames, String[] refSpecs) throws IOException { File temp = File.createTempFile("tmp", "config"); try { return createRepositoryConfigurations(pUrls,repoNames,refSpecs,temp); } finally { temp.delete(); } } /** * @deprecated * Use {@link #createRepositoryConfigurations(String[], String[], String[])} */ public static List<RemoteConfig> createRepositoryConfigurations(String[] pUrls, String[] repoNames, String[] refSpecs, File temp) { List<RemoteConfig> remoteRepositories; RepositoryConfig repoConfig = new RepositoryConfig(null, temp); // Make up a repo config from the request parameters String[] urls = pUrls; String[] names = repoNames; names = GitUtils.fixupNames(names, urls); String[] refs = refSpecs; if (names != null) { for (int i = 0; i < names.length; i++) { String name = names[i]; name = name.replace(' ', '_'); if(refs[i] == null || refs[i].length() == 0) { refs[i] = "+refs/heads/*:refs/remotes/" + name + "/*"; } repoConfig.setString("remote", name, "url", urls[i]); repoConfig.setString("remote", name, "fetch", refs[i]); } } try { repoConfig.save(); remoteRepositories = RemoteConfig.getAllRemoteConfigs(repoConfig); } catch (Exception e) { throw new GitException("Error creating repositories", e); } return remoteRepositories; } public static List<BranchSpec> createBranches(String[] branch) { List<BranchSpec> branches = new ArrayList<BranchSpec>(); String[] branchData = branch; for(int i=0; i<branchData.length;i++) { branches.add(new BranchSpec(branchData[i])); } if(branches.size() == 0) { branches.add(new BranchSpec("*/master")); } return branches; } public static PreBuildMergeOptions createMergeOptions(String doMerge, String pMergeRemote, String mergeTarget, List<RemoteConfig> remoteRepositories) throws FormException { PreBuildMergeOptions mergeOptions = new PreBuildMergeOptions(); if(doMerge != null && doMerge.trim().length() > 0) { RemoteConfig mergeRemote = null; String mergeRemoteName = pMergeRemote.trim(); if (mergeRemoteName.length() == 0) mergeRemote = remoteRepositories.get(0); else for (RemoteConfig remote : remoteRepositories) { if (remote.getName().equals(mergeRemoteName)) { mergeRemote = remote; break; } } if (mergeRemote==null) throw new FormException("No remote repository configured with name '" + mergeRemoteName + "'", "git.mergeRemote"); mergeOptions.setMergeRemote(mergeRemote); mergeOptions.setMergeTarget(mergeTarget); } return mergeOptions; } public static GitWeb createGitWeb(String url) { GitWeb gitWeb = null; String gitWebUrl = url; if (gitWebUrl != null && gitWebUrl.length() > 0) { try { gitWeb = new GitWeb(gitWebUrl); } catch (MalformedURLException e) { throw new GitException("Error creating GitWeb", e); } } return gitWeb; } public FormValidation doGitRemoteNameCheck(StaplerRequest req, StaplerResponse rsp) throws IOException, ServletException { String mergeRemoteName = req.getParameter("value"); boolean isMerge = req.getParameter("isMerge") != null; // Added isMerge because we don't want to allow empty remote names for tag/branch pushes. 
if (mergeRemoteName.length() == 0 && isMerge) return FormValidation.ok(); String[] urls = req.getParameterValues("git.repo.url"); String[] names = req.getParameterValues("git.repo.name"); names = GitUtils.fixupNames(names, urls); for (String name : names) { if (name.equals(mergeRemoteName)) return FormValidation.ok(); } return FormValidation.error("No remote repository configured with name '" + mergeRemoteName + "'"); } } private static final long serialVersionUID = 1L; public boolean getRecursiveSubmodules() { return this.recursiveSubmodules; } public boolean getDoGenerate() { return this.doGenerateSubmoduleConfigurations; } public List<BranchSpec> getBranches() { return branches; } public PreBuildMergeOptions getMergeOptions() { return mergeOptions; } /** * Look back as far as needed to find a valid BuildData. BuildData * may not be recorded if an exception occurs in the plugin logic. * @param build * @param clone * @return the last recorded build data */ public BuildData getBuildData(Run build, boolean clone) { BuildData buildData = null; while (build != null) { buildData = build.getAction(BuildData.class); if (buildData != null) break; build = build.getPreviousBuild(); } if (buildData == null) return clone? new BuildData() : null; if (clone) return buildData.clone(); else return buildData; } /** * Given the workspace, gets the working directory, which will be the workspace * if no relative target dir is specified. Otherwise, it'll be "workspace/relativeTargetDir". * * @param workspace * @return working directory */ protected FilePath workingDirectory(final FilePath workspace) { if (relativeTargetDir == null || relativeTargetDir.length() == 0 || relativeTargetDir.equals(".")) { return workspace; } return workspace.child(relativeTargetDir); } public String getLocalBranch() { return Util.fixEmpty(localBranch); } public String getRelativeTargetDir() { return relativeTargetDir; } /** * Given a Revision, check whether it matches any exclusion rules. * * @param git IGitAPI object * @param r Revision object * @param listener * @return true if any exclusion files are matched, false otherwise. */ private boolean isRevExcluded(IGitAPI git, Revision r, TaskListener listener) { try { List<String> revShow = git.showRevision(r); // If the revision info is empty, something went weird, so we'll just // return false. if (revShow.size() == 0) { return false; } GitChangeSet change = new GitChangeSet(revShow, authorOrCommitter); Pattern[] excludedPatterns = getExcludedRegionsPatterns(); Set<String> excludedUsers = getExcludedUsersNormalized(); String author = change.getAuthorName(); if (excludedUsers.contains(author)) { // If the author is an excluded user, don't count this entry as a change listener.getLogger().println("Ignored commit " + r.getSha1String() + ": Found excluded author: " + author); return true; } List<String> paths = new ArrayList<String>(change.getAffectedPaths()); if (paths.isEmpty()) { // If there weren't any changed files here, we're just going to return false. return false; } List<String> excludedPaths = new ArrayList<String>(); if (excludedPatterns.length > 0) { for (String path : paths) { for (Pattern pattern : excludedPatterns) { if (pattern.matcher(path).matches()) { excludedPaths.add(path); break; } } } } // If every affected path is excluded, return true. 
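            // Illustration: with excludedRegions "docs/.*", a commit touching only "docs/readme.md" is
            // ignored, while one touching both "docs/readme.md" and "src/Main.java" still counts as a change.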
if (paths.size() == excludedPaths.size()) { listener.getLogger().println("Ignored commit " + r.getSha1String() + ": Found only excluded paths: " + Util.join(excludedPaths, ", ")); return true; } } catch (GitException e) { // If an error was hit getting the revision info, assume something // else entirely is wrong and we don't care, so return false. return false; } // By default, return false. return false; } private static final Logger LOGGER = Logger.getLogger(GitSCM.class.getName()); }
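/*
 * Illustrative sketch (not part of the plugin source): a minimal, standalone re-creation of the
 * exclusion rules applied by isRevExcluded() above, so the excluded-regions / excluded-users
 * behaviour can be exercised without a Jenkins runtime. The package, class and method names here
 * are invented for the example; only the filtering logic mirrors the plugin.
 */
package example;

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.regex.Pattern;

public class ExclusionFilterSketch {

    private final Pattern[] excludedRegions;
    private final Set<String> excludedUsers;

    public ExclusionFilterSketch(List<String> regions, Set<String> users) {
        this.excludedRegions = new Pattern[regions.size()];
        for (int i = 0; i < regions.size(); i++) {
            this.excludedRegions[i] = Pattern.compile(regions.get(i));
        }
        this.excludedUsers = users;
    }

    /** A commit is excluded if its author is excluded, or if every path it touches matches an excluded region. */
    public boolean isExcluded(String author, List<String> affectedPaths) {
        if (excludedUsers.contains(author)) {
            return true;
        }
        if (affectedPaths.isEmpty() || excludedRegions.length == 0) {
            return false;
        }
        int excludedCount = 0;
        for (String path : affectedPaths) {
            for (Pattern p : excludedRegions) {
                if (p.matcher(path).matches()) {
                    excludedCount++;
                    break;
                }
            }
        }
        return excludedCount == affectedPaths.size();
    }

    public static void main(String[] args) {
        ExclusionFilterSketch filter = new ExclusionFilterSketch(
                Arrays.asList("docs/.*", ".*\\.md"),
                new HashSet<String>(Arrays.asList("ci-bot")));
        // Documentation-only commit: every path excluded -> no build would be triggered.
        System.out.println(filter.isExcluded("alice", Arrays.asList("docs/guide.md")));                  // true
        // Mixed commit: at least one non-excluded path -> still counts as a change.
        System.out.println(filter.isExcluded("alice", Arrays.asList("docs/guide.md", "src/Main.java"))); // false
        // Excluded author -> ignored regardless of the paths touched.
        System.out.println(filter.isExcluded("ci-bot", Arrays.asList("src/Main.java")));                 // true
    }
}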
package replicaset; import org.bson.Document; import com.mongodb.*; import com.mongodb.client.*; /** * Continuously inserts documents with sequential _id values into samples.ids (one every 500 ms), printing each result, so the effect of replica-set elections and failover on writes can be watched from the console. */ public class Write { public static void main( String[] args ) throws InterruptedException { @SuppressWarnings("resource") MongoClient client = new MongoClient(new ServerAddress("localhost", 30001)); MongoCollection<Document> collection = client.getDatabase("samples").getCollection("ids"); collection.drop(); // start from an empty collection on every run for (int i = 0; i < Integer.MAX_VALUE; i++) { try { collection.insertOne(new Document("_id", i)); System.out.println("Inserted : " + i); } catch (Exception exception) { // e.g. "not master" or connection errors during an election - log and keep trying System.out.println(exception.getMessage()); } Thread.sleep(500); } } }
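/*
 * Companion sketch to Write above (not part of the original sample): tails the same
 * "samples.ids" collection and prints the highest _id seen, so replica-set failover can also be
 * observed from the read side. The extra seed ports 30002/30003 are an assumption about the
 * local replica-set layout; adjust them to match the actual members.
 */
package replicaset;

import java.util.Arrays;

import org.bson.Document;

import com.mongodb.MongoClient;
import com.mongodb.MongoClientOptions;
import com.mongodb.ReadPreference;
import com.mongodb.ServerAddress;
import com.mongodb.client.MongoCollection;

public class Read {
    public static void main(String[] args) throws InterruptedException {
        // Seed with all assumed members so the driver can fail over between them,
        // and prefer secondaries so reads keep working while the primary changes.
        MongoClientOptions options = MongoClientOptions.builder()
                .readPreference(ReadPreference.secondaryPreferred())
                .build();
        @SuppressWarnings("resource")
        MongoClient client = new MongoClient(Arrays.asList(
                new ServerAddress("localhost", 30001),
                new ServerAddress("localhost", 30002),
                new ServerAddress("localhost", 30003)), options);
        MongoCollection<Document> collection = client.getDatabase("samples").getCollection("ids");
        while (true) {
            try {
                // Highest _id inserted so far by the Write loop.
                Document latest = collection.find().sort(new Document("_id", -1)).first();
                System.out.println("Latest : " + (latest == null ? "none" : latest.get("_id")));
            } catch (Exception exception) {
                System.out.println(exception.getMessage());
            }
            Thread.sleep(500);
        }
    }
}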
package hudson.plugins.git; import com.google.common.collect.Iterables; import edu.umd.cs.findbugs.annotations.CheckForNull; import edu.umd.cs.findbugs.annotations.NonNull; import hudson.*; import hudson.init.Initializer; import hudson.matrix.MatrixBuild; import hudson.matrix.MatrixRun; import hudson.model.*; import hudson.model.Descriptor.FormException; import hudson.model.Hudson.MasterComputer; import hudson.plugins.git.browser.GitRepositoryBrowser; import hudson.plugins.git.browser.GitWeb; import hudson.plugins.git.extensions.GitClientConflictException; import hudson.plugins.git.extensions.GitClientType; import hudson.plugins.git.extensions.GitSCMExtension; import hudson.plugins.git.extensions.GitSCMExtensionDescriptor; import hudson.plugins.git.extensions.impl.AuthorInChangelog; import hudson.plugins.git.extensions.impl.LocalBranch; import hudson.plugins.git.extensions.impl.PreBuildMerge; import hudson.plugins.git.extensions.impl.RemotePoll; import hudson.plugins.git.opt.PreBuildMergeOptions; import hudson.plugins.git.util.Build; import hudson.plugins.git.util.*; import hudson.remoting.Channel; import hudson.scm.*; import hudson.tasks.Builder; import hudson.tasks.Publisher; import hudson.triggers.SCMTrigger; import hudson.util.DescribableList; import hudson.util.FormValidation; import hudson.util.IOException2; import hudson.util.IOUtils; import hudson.util.ListBoxModel; import jenkins.model.Jenkins; import net.sf.json.JSONObject; import org.eclipse.jgit.lib.Config; import org.eclipse.jgit.lib.ObjectId; import org.eclipse.jgit.transport.RefSpec; import org.eclipse.jgit.transport.RemoteConfig; import org.jenkinsci.plugins.gitclient.ChangelogCommand; import org.jenkinsci.plugins.gitclient.CloneCommand; import org.jenkinsci.plugins.gitclient.Git; import org.jenkinsci.plugins.gitclient.GitClient; import org.jenkinsci.plugins.gitclient.JGitTool; import org.kohsuke.stapler.DataBoundConstructor; import org.kohsuke.stapler.Stapler; import org.kohsuke.stapler.StaplerRequest; import org.kohsuke.stapler.export.Exported; import javax.servlet.ServletException; import java.io.File; import java.io.IOException; import java.io.OutputStreamWriter; import java.io.PrintStream; import java.io.Serializable; import java.io.Writer; import java.net.MalformedURLException; import java.text.MessageFormat; import java.util.*; import java.util.logging.Level; import java.util.logging.Logger; import static hudson.Util.*; import static hudson.init.InitMilestone.JOB_LOADED; import static hudson.init.InitMilestone.PLUGINS_STARTED; import static hudson.scm.PollingResult.*; import static org.apache.commons.lang.StringUtils.isBlank; /** * Git SCM. * * @author Nigel Magnay * @author Andrew Bayer * @author Nicolas Deloof * @author Kohsuke Kawaguchi * ... and many others */ public class GitSCM extends GitSCMBackwardCompatibility { /** * Store a config version so we're able to migrate config on various * functionality upgrades. */ private Long configVersion; /** * All the remote repositories that we know about. */ private List<UserRemoteConfig> userRemoteConfigs; private transient List<RemoteConfig> remoteRepositories; /** * All the branches that we wish to care about building. 
*/ private List<BranchSpec> branches; private boolean doGenerateSubmoduleConfigurations; private BuildChooser buildChooser; public String gitTool = null; private GitRepositoryBrowser browser; private Collection<SubmoduleConfig> submoduleCfg; public static final String GIT_BRANCH = "GIT_BRANCH"; public static final String GIT_COMMIT = "GIT_COMMIT"; public static final String GIT_PREVIOUS_COMMIT = "GIT_PREVIOUS_COMMIT"; /** * All the configured extensions attached to this. */ private DescribableList<GitSCMExtension,GitSCMExtensionDescriptor> extensions; public Collection<SubmoduleConfig> getSubmoduleCfg() { return submoduleCfg; } public void setSubmoduleCfg(Collection<SubmoduleConfig> submoduleCfg) { this.submoduleCfg = submoduleCfg; } static private List<UserRemoteConfig> createRepoList(String url) { List<UserRemoteConfig> repoList = new ArrayList<UserRemoteConfig>(); repoList.add(new UserRemoteConfig(url, null, null)); return repoList; } /** * A convenience constructor that sets everything to default. * * @param repositoryUrl * Repository URL to clone from. */ public GitSCM(String repositoryUrl) { this( createRepoList(repositoryUrl), Collections.singletonList(new BranchSpec("")), false, Collections.<SubmoduleConfig>emptyList(), new DefaultBuildChooser(), null, null, null); } // @Restricted(NoExternalUse.class) // because this keeps changing @DataBoundConstructor public GitSCM( List<UserRemoteConfig> userRemoteConfigs, List<BranchSpec> branches, Boolean doGenerateSubmoduleConfigurations, Collection<SubmoduleConfig> submoduleCfg, BuildChooser buildChooser, GitRepositoryBrowser browser, String gitTool, List<GitSCMExtension> extensions) { // moved from createBranches if (branches == null) { branches = new ArrayList<BranchSpec>(); } if (branches.isEmpty()) { branches.add(new BranchSpec("*/master")); } this.branches = branches; this.userRemoteConfigs = userRemoteConfigs; updateFromUserData(); // TODO: getBrowserFromRequest this.browser = browser; // emulate bindJSON behavior here if (doGenerateSubmoduleConfigurations != null) { this.doGenerateSubmoduleConfigurations = doGenerateSubmoduleConfigurations; } else { this.doGenerateSubmoduleConfigurations = false; } if (submoduleCfg == null) { submoduleCfg = new ArrayList<SubmoduleConfig>(); } this.submoduleCfg = submoduleCfg; this.configVersion = 2L; this.gitTool = gitTool; if (buildChooser==null) buildChooser = new DefaultBuildChooser(); this.buildChooser = buildChooser; buildChooser.gitSCM = this; // set the owner this.extensions = new DescribableList<GitSCMExtension, GitSCMExtensionDescriptor>(Saveable.NOOP,Util.fixNull(extensions)); } /** * All the configured extensions attached to this {@link GitSCM}. * * Going forward this is primarily how we'll support esoteric use cases. 
* * @since 1.EXTENSION */ public DescribableList<GitSCMExtension, GitSCMExtensionDescriptor> getExtensions() { return extensions; } private void updateFromUserData() throws GitException { // do what newInstance used to do directly from the request data try { String[] pUrls = new String[userRemoteConfigs.size()]; String[] repoNames = new String[userRemoteConfigs.size()]; String[] refSpecs = new String[userRemoteConfigs.size()]; for (int i = 0; i < userRemoteConfigs.size(); ++i) { pUrls[i] = userRemoteConfigs.get(i).getUrl(); repoNames[i] = userRemoteConfigs.get(i).getName(); refSpecs[i] = userRemoteConfigs.get(i).getRefspec(); } this.remoteRepositories = DescriptorImpl.createRepositoryConfigurations(pUrls, repoNames, refSpecs); // TODO: replace with new repositories } catch (IOException e1) { throw new GitException("Error creating repositories", e1); } } public Object readResolve() throws IOException { // Migrate data // Default unspecified to v0 if (configVersion == null) { configVersion = 0L; } if (source != null) { remoteRepositories = new ArrayList<RemoteConfig>(); branches = new ArrayList<BranchSpec>(); doGenerateSubmoduleConfigurations = false; remoteRepositories.add(newRemoteConfig("origin", source, new RefSpec("+refs/heads/*:refs/remotes/origin/*"))); if (branch != null) { branches.add(new BranchSpec(branch)); } else { branches.add(new BranchSpec("*/master")); } } if (configVersion < 1 && branches != null) { // Migrate the branch specs from // single * wildcard, to ** wildcard. for (BranchSpec branchSpec : branches) { String name = branchSpec.getName(); name = name.replace("*", "**"); branchSpec.setName(name); } } if (remoteRepositories != null && userRemoteConfigs == null) { userRemoteConfigs = new ArrayList<UserRemoteConfig>(); for(RemoteConfig cfg : remoteRepositories) { // converted as in config.jelly String url = ""; if (cfg.getURIs().size() > 0 && cfg.getURIs().get(0) != null) url = cfg.getURIs().get(0).toPrivateString(); String refspec = ""; if (cfg.getFetchRefSpecs().size() > 0 && cfg.getFetchRefSpecs().get(0) != null) refspec = cfg.getFetchRefSpecs().get(0).toString(); userRemoteConfigs.add(new UserRemoteConfig(url, cfg.getName(), refspec)); } } // patch internal objects from user data // if (configVersion == 2) { if (remoteRepositories == null) { // if we don't catch GitException here, the whole job fails to load try { updateFromUserData(); } catch (GitException e) { LOGGER.log(Level.WARNING, "Failed to load SCM data", e); } } if (choosingStrategy != null && buildChooser == null) { for (BuildChooserDescriptor d : BuildChooser.all()) { if (choosingStrategy.equals(d.getLegacyId())) { try { buildChooser = d.clazz.newInstance(); } catch (InstantiationException e) { LOGGER.log(Level.WARNING, "Failed to instantiate the build chooser", e); } catch (IllegalAccessException e) { LOGGER.log(Level.WARNING, "Failed to instantiate the build chooser", e); } } } } if (buildChooser == null) { buildChooser = new DefaultBuildChooser(); } buildChooser.gitSCM = this; if (extensions==null) extensions = new DescribableList<GitSCMExtension, GitSCMExtensionDescriptor>(Saveable.NOOP); readBackExtensionsFromLegacy(); return this; } @Override public GitRepositoryBrowser getBrowser() { return browser; } public boolean isCreateAccountBasedOnEmail() { DescriptorImpl gitDescriptor = getDescriptor(); return (gitDescriptor != null && gitDescriptor.isCreateAccountBasedOnEmail()); } public BuildChooser getBuildChooser() { return buildChooser; } public void setBuildChooser(BuildChooser buildChooser) { 
this.buildChooser = buildChooser; } /** * Gets the parameter-expanded effective value in the context of the current build. */ public String getParamLocalBranch(AbstractBuild<?, ?> build) { LocalBranch lb = getExtensions().get(LocalBranch.class); return GitSCM.getParameterString(lb!=null?lb.getLocalBranch():null, build); } /** * Expand parameters in {@link #remoteRepositories} with the parameter values provided in the given build * and return them. * * @return can be empty but never null. */ public List<RemoteConfig> getParamExpandedRepos(AbstractBuild<?, ?> build) { List<RemoteConfig> expandedRepos = new ArrayList<RemoteConfig>(); for (RemoteConfig oldRepo : Util.fixNull(remoteRepositories)) { expandedRepos.add(newRemoteConfig(getParameterString(oldRepo.getName(), build), getParameterString(oldRepo.getURIs().get(0).toPrivateString(), build), new RefSpec(getRefSpec(oldRepo, build)))); } return expandedRepos; } public RemoteConfig getRepositoryByName(String repoName) { for (RemoteConfig r : getRepositories()) { if (r.getName().equals(repoName)) { return r; } } return null; } @Exported public List<UserRemoteConfig> getUserRemoteConfigs() { return Collections.unmodifiableList(userRemoteConfigs); } @Exported public List<RemoteConfig> getRepositories() { // Handle null-value to ensure backwards-compatibility, ie project configuration missing the <repositories/> XML element if (remoteRepositories == null) { return new ArrayList<RemoteConfig>(); } return remoteRepositories; } public String getGitTool() { return gitTool; } public static String getParameterString(String original, AbstractBuild<?, ?> build) { ParametersAction parameters = build.getAction(ParametersAction.class); if (parameters != null) { original = parameters.substitute(build, original); } return original; } private String getRefSpec(RemoteConfig repo, AbstractBuild<?, ?> build) { String refSpec = repo.getFetchRefSpecs().get(0).toString(); return getParameterString(refSpec, build); } /** * If the configuration is such that we are tracking just one branch of one repository * return that branch specifier (in the form of something like "origin/master" * * Otherwise return null. */ private String getSingleBranch(AbstractBuild<?, ?> build) { // if we have multiple branches skip to advanced usecase if (getBranches().size() != 1 || getRepositories().size() != 1) { return null; } String branch = getBranches().get(0).getName(); String repository = getRepositories().get(0).getName(); // replace repository wildcard with repository name if (branch.startsWith("*/")) { branch = repository + branch.substring(1); } // if the branch name contains more wildcards then the simple usecase // does not apply and we need to skip to the advanced usecase if (branch.contains("*")) { return null; } // substitute build parameters if available branch = getParameterString(branch, build); // Check for empty string - replace with "**" when seen. 
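        // Illustration: one repository named "origin" with branch spec "*/master" resolves to the
        // single branch "origin/master"; a blank spec falls through to "**" below, the match-anything wildcard.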
if (branch.equals("")) { branch = "**"; } return branch; } @Override public SCMRevisionState calcRevisionsFromBuild(AbstractBuild<?, ?> abstractBuild, Launcher launcher, TaskListener taskListener) throws IOException, InterruptedException { return SCMRevisionState.NONE; } @Override public boolean requiresWorkspaceForPolling() { return getExtensions().get(RemotePoll.class)==null; } @Override protected PollingResult compareRemoteRevisionWith(AbstractProject<?, ?> project, Launcher launcher, FilePath workspace, final TaskListener listener, SCMRevisionState baseline) throws IOException, InterruptedException { try { return compareRemoteRevisionWithImpl( project, launcher, workspace, listener, baseline); } catch (GitException e){ throw new IOException2(e); } } private PollingResult compareRemoteRevisionWithImpl(AbstractProject<?, ?> project, Launcher launcher, FilePath workspace, final TaskListener listener, SCMRevisionState baseline) throws IOException, InterruptedException { // Poll for changes. Are there any unbuilt revisions that Hudson ought to build ? listener.getLogger().println("Using strategy: " + buildChooser.getDisplayName()); final AbstractBuild lastBuild = project.getLastBuild(); if (lastBuild == null) { // If we've never been built before, well, gotta build! listener.getLogger().println("[poll] No previous build, so forcing an initial build."); return BUILD_NOW; } final BuildData buildData = fixNull(getBuildData(lastBuild)); if (buildData.lastBuild != null) { listener.getLogger().println("[poll] Last Built Revision: " + buildData.lastBuild.revision); } final String singleBranch = getSingleBranch(lastBuild); // fast remote polling needs a single branch and an existing last build if (getExtensions().get(RemotePoll.class)!=null && singleBranch != null && buildData.lastBuild != null && buildData.lastBuild.getMarked() != null) { final EnvVars environment = GitUtils.getPollEnvironment(project, workspace, launcher, listener, false); GitClient git = createClient(listener, environment, Jenkins.getInstance(), null); String gitRepo = getParamExpandedRepos(lastBuild).get(0).getURIs().get(0).toString(); ObjectId head = git.getHeadRev(gitRepo, getBranches().get(0).getName()); if (head != null && buildData.lastBuild.getMarked().getSha1().equals(head)) { return NO_CHANGES; } else { return BUILD_NOW; } } final EnvVars environment = GitUtils.getPollEnvironment(project, workspace, launcher, listener); FilePath workingDirectory = workingDirectory(project,workspace,environment,listener); // Rebuild if the working directory doesn't exist // I'm actually not 100% sure about this, but I'll leave it in for now. // Update 9/9/2010 - actually, I think this *was* needed, since we weren't doing a better check // for whether we'd ever been built before. But I'm fixing that right now anyway. // JENKINS-10880: workingDirectory can be null if (workingDirectory == null || !workingDirectory.exists()) { return BUILD_NOW; } // which node is this workspace from? 
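        // n defaults to the Jenkins master node; for a remote workspace we look up the Computer whose
        // channel backs that workspace, so the GitClient created below runs where the checkout lives.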
Node n = Jenkins.getInstance(); if (workspace.isRemote()) { // there should be always one match, but just in case we initialize n to a non-null value for (Computer c : Jenkins.getInstance().getComputers()) { if (c.getChannel()==workspace.getChannel()) { n = c.getNode(); break; } } } GitClient git = createClient(listener, environment, n, workingDirectory); if (git.hasGitRepo()) { // Repo is there - do a fetch listener.getLogger().println("Fetching changes from the remote Git repositories"); // Fetch updates for (RemoteConfig remoteRepository : getParamExpandedRepos(lastBuild)) { fetchFrom(git, listener, remoteRepository); } listener.getLogger().println("Polling for changes in"); Collection<Revision> candidates = buildChooser.getCandidateRevisions( true, singleBranch, git, listener, buildData, new BuildChooserContextImpl(project,null)); for (Revision c : candidates) { if (!isRevExcluded(git, c, listener, buildData)) { return PollingResult.SIGNIFICANT; } } return NO_CHANGES; } else { listener.getLogger().println("No Git repository yet, an initial checkout is required"); return PollingResult.SIGNIFICANT; } } /** * Allows {@link Builder}s and {@link Publisher}s to access a configured {@link GitClient} object to * perform additional git operations. */ public GitClient createClient(BuildListener listener, EnvVars environment, AbstractBuild<?,?> build) throws IOException, InterruptedException { FilePath ws = workingDirectory(build.getProject(), build.getWorkspace(), environment, listener); ws.mkdirs(); // ensure it exists return createClient(listener,environment,build.getBuiltOn(),ws); } /*package*/ GitClient createClient(TaskListener listener, EnvVars environment, Node n, FilePath ws) throws IOException, InterruptedException { String gitExe = getGitExe(n, listener); Git git = Git.with(listener, environment).in(ws).using(gitExe); GitClient c = git.getClient(); for (GitSCMExtension ext : extensions) { c = ext.decorate(this,c); } return c; } private BuildData fixNull(BuildData bd) { return bd != null ? bd : new BuildData(getScmName(), getUserRemoteConfigs()) /*dummy*/; } /** * Fetch information from a particular remote repository. * * @param git * @param listener * @param remoteRepository * @throws */ private void fetchFrom(GitClient git, TaskListener listener, RemoteConfig remoteRepository) throws InterruptedException { String name = remoteRepository.getName(); // Assume there is only 1 URL / refspec for simplicity String url = remoteRepository.getURIs().get(0).toPrivateString(); try { git.setRemoteUrl(name, url); git.fetch(name, remoteRepository.getFetchRefSpecs().get(0)); } catch (GitException ex) { throw new GitException("Failed to fetch from "+name+": "+url,ex); } } private RemoteConfig newRemoteConfig(String name, String refUrl, RefSpec refSpec) { try { Config repoConfig = new Config(); // Make up a repo config from the request parameters repoConfig.setString("remote", name, "url", refUrl); repoConfig.setString("remote", name, "fetch", refSpec.toString()); return RemoteConfig.getAllRemoteConfigs(repoConfig).get(0); } catch (Exception ex) { throw new GitException("Error trying to create JGit configuration", ex); } } public GitTool resolveGitTool(TaskListener listener) { if (gitTool == null) return GitTool.getDefaultInstallation(); GitTool git = Jenkins.getInstance().getDescriptorByType(GitTool.DescriptorImpl.class).getInstallation(gitTool); if (git == null) { listener.getLogger().println("selected Git installation does not exists. 
Using Default"); git = GitTool.getDefaultInstallation(); } return git; } public String getGitExe(Node builtOn, TaskListener listener) { return getGitExe(builtOn, null, listener); } /** * Exposing so that we can get this from GitPublisher. */ public String getGitExe(Node builtOn, EnvVars env, TaskListener listener) { GitClientType client = GitClientType.ANY; for (GitSCMExtension ext : extensions) { try { client = client.combine(ext.getRequiredClient()); } catch (GitClientConflictException e) { throw new RuntimeException(ext.getDescriptor().getDisplayName() + " extended Git behavior is incompatible with other behaviors"); } } if (client == GitClientType.JGIT) return JGitTool.MAGIC_EXENAME; GitTool tool = resolveGitTool(listener); if (builtOn != null) { try { tool = tool.forNode(builtOn, listener); } catch (IOException e) { listener.getLogger().println("Failed to get git executable"); } catch (InterruptedException e) { listener.getLogger().println("Failed to get git executable"); } } if (env != null) { tool = tool.forEnvironment(env); } return tool.getGitExe(); } /** * Web-bound method to let people look up a build by their SHA1 commit. */ public AbstractBuild<?,?> getBySHA1(String sha1) { AbstractProject<?,?> p = Stapler.getCurrentRequest().findAncestorObject(AbstractProject.class); for (AbstractBuild b : p.getBuilds()) { BuildData d = b.getAction(BuildData.class); if (d!=null && d.lastBuild!=null) { Build lb = d.lastBuild; if (lb.isFor(sha1)) return b; } } return null; } /*package*/ static class BuildChooserContextImpl implements BuildChooserContext, Serializable { final AbstractProject project; final AbstractBuild build; BuildChooserContextImpl(AbstractProject project, AbstractBuild build) { this.project = project; this.build = build; } public <T> T actOnBuild(ContextCallable<AbstractBuild<?,?>, T> callable) throws IOException, InterruptedException { return callable.invoke(build,Hudson.MasterComputer.localChannel); } public <T> T actOnProject(ContextCallable<AbstractProject<?,?>, T> callable) throws IOException, InterruptedException { return callable.invoke(project, MasterComputer.localChannel); } public AbstractBuild<?, ?> getBuild() { return build; } private Object writeReplace() { return Channel.current().export(BuildChooserContext.class,new BuildChooserContext() { public <T> T actOnBuild(ContextCallable<AbstractBuild<?,?>, T> callable) throws IOException, InterruptedException { return callable.invoke(build,Channel.current()); } public <T> T actOnProject(ContextCallable<AbstractProject<?,?>, T> callable) throws IOException, InterruptedException { return callable.invoke(project,Channel.current()); } public AbstractBuild<?, ?> getBuild() { return build; } }); } } /** * Determines the commit to be built in this round, updating the working tree accordingly, * and return the information about the selected commit. * * <p> * For robustness, this method shouldn't assume too much about the state of the working tree when this method * is called. In a general case, a working tree is a left-over from the previous build, so it can be quite * messed up (such as HEAD pointing to a random branch.) It is expected that this method brings it back * to the predictable clean state by the time this method returns. 
*/ private @NonNull Build determineRevisionToBuild(final AbstractBuild build, final BuildData buildData, final EnvVars environment, final GitClient git, final BuildListener listener) throws IOException, InterruptedException { PrintStream log = listener.getLogger(); // every MatrixRun should build the exact same commit ID if (build instanceof MatrixRun) { MatrixBuild parentBuild = ((MatrixRun) build).getParentBuild(); if (parentBuild != null) { BuildData parentBuildData = getBuildData(parentBuild); if (parentBuildData != null) { Build lastBuild = parentBuildData.lastBuild; if (lastBuild!=null) return lastBuild; } } } // parameter forcing the commit ID to build final RevisionParameterAction rpa = build.getAction(RevisionParameterAction.class); if (rpa != null) return new Build(rpa.toRevision(git), build.getNumber(), null); final BuildChooserContext context = new BuildChooserContextImpl(build.getProject(), build); Collection<Revision> candidates = buildChooser.getCandidateRevisions( false, environment.expand( getSingleBranch(build) ), git, listener, buildData, context); if (candidates.size() == 0) { // getBuildCandidates should make the last item the last build, so a re-build // will build the last built thing. throw new AbortException("Couldn't find any revision to build. Verify the repository and branch configuration for this job."); } if (candidates.size() > 1) { log.println("Multiple candidate revisions"); AbstractProject<?, ?> project = build.getProject(); if (!project.isDisabled()) { log.println("Scheduling another build to catch up with " + project.getFullDisplayName()); if (!project.scheduleBuild(0, new SCMTrigger.SCMTriggerCause())) { log.println("WARNING: multiple candidate revisions, but unable to schedule build of " + project.getFullDisplayName()); } } } Revision rev = candidates.iterator().next(); Revision marked = rev; for (GitSCMExtension ext : extensions) { rev = ext.decorateRevisionToBuild(this,build,git,listener,rev); } return new Build(marked, rev, build.getNumber(), null); } /** * Retrieve Git objects from the specified remotes by doing the likes of clone/fetch/pull/etc. * * By the end of this method, remote refs are updated to include all the commits found in the remote servers. 
*/ private void retrieveChanges(AbstractBuild build, GitClient git, BuildListener listener) throws IOException, InterruptedException { final PrintStream log = listener.getLogger(); List<RemoteConfig> repos = getParamExpandedRepos(build); if (repos.isEmpty()) return; // defensive check even though this is an invalid configuration if (git.hasGitRepo()) { // It's an update if (repos.size() == 1) log.println("Fetching changes from the remote Git repository"); else log.println(MessageFormat.format("Fetching changes from {0} remote Git repositories", repos.size())); } else { log.println("Cloning the remote Git repository"); // Clone from the first and then fetch from the rest RemoteConfig rc = repos.get(0); repos = repos.subList(1,repos.size()); try { CloneCommand cmd = git.clone_().url(rc.getURIs().get(0).toPrivateString()).repositoryName(rc.getName()); for (GitSCMExtension ext : extensions) { ext.decorateCloneCommand(this, build, git, listener, cmd); } cmd.execute(); } catch (GitException ex) { ex.printStackTrace(listener.error("Error cloning remote repo '%s'", rc.getName())); throw new AbortException(); } } for (RemoteConfig remoteRepository : repos) { fetchFrom(git, listener, remoteRepository); } } @Override public boolean checkout(AbstractBuild build, Launcher launcher, FilePath workspace, BuildListener listener, File changelogFile) throws IOException, InterruptedException { if (VERBOSE) listener.getLogger().println("Using strategy: " + buildChooser.getDisplayName()); BuildData previousBuildData = getBuildData(build.getPreviousBuild()); // read only BuildData buildData = copyBuildData(build.getPreviousBuild()); build.addAction(buildData); if (VERBOSE && buildData.lastBuild != null) { listener.getLogger().println("Last Built Revision: " + buildData.lastBuild.revision); } EnvVars environment = build.getEnvironment(listener); GitClient git = createClient(listener,environment,build); for (GitSCMExtension ext : extensions) { ext.beforeCheckout(this, build, git, listener); } retrieveChanges(build, git, listener); Build revToBuild = determineRevisionToBuild(build, buildData, environment, git, listener); environment.put(GIT_COMMIT, revToBuild.revision.getSha1String()); Branch branch = Iterables.getFirst(revToBuild.revision.getBranches(),null); if (branch!=null) // null for a detached HEAD environment.put(GIT_BRANCH, branch.getName()); listener.getLogger().println("Checking out " + revToBuild.revision); git.checkoutBranch(getParamLocalBranch(build), revToBuild.revision.getSha1String()); buildData.saveBuild(revToBuild); build.addAction(new GitTagAction(build, buildData)); computeChangeLog(git, revToBuild.revision, listener, previousBuildData, new FilePath(changelogFile), new BuildChooserContextImpl(build.getProject(), build)); for (GitSCMExtension ext : extensions) { ext.onCheckoutCompleted(this, build, git,listener); } return true; } private void computeChangeLog(GitClient git, Revision revToBuild, BuildListener listener, BuildData previousBuildData, FilePath changelogFile, BuildChooserContext context) throws IOException, InterruptedException { Writer out = new OutputStreamWriter(changelogFile.write(),"UTF-8"); ChangelogCommand changelog = git.changelog(); changelog.includes(revToBuild.getSha1()); try { boolean exclusion = false; for (Branch b : revToBuild.getBranches()) { Build lastRevWas = buildChooser.prevBuildForChangelog(b.getName(), previousBuildData, git, context); if (lastRevWas != null && git.isCommitInRepo(lastRevWas.getSHA1())) { changelog.excludes(lastRevWas.getSHA1()); exclusion = true; 
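                    // i.e. for this branch the changelog command now covers the range lastRevWas..revToBuild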
} } if (!exclusion) { // this is the first time we are building this branch, so there's no base line to compare against. // if we force the changelog, it'll contain all the changes in the repo, which is not what we want. listener.getLogger().println("First time build. Skipping changelog."); } else { changelog.to(out).max(MAX_CHANGELOG).execute(); } } catch (GitException ge) { ge.printStackTrace(listener.error("Unable to retrieve changeset")); } finally { IOUtils.closeQuietly(out); } } public void buildEnvVars(AbstractBuild<?, ?> build, java.util.Map<String, String> env) { super.buildEnvVars(build, env); Revision rev = fixNull(getBuildData(build)).getLastBuiltRevision(); if (rev!=null) { Branch branch = Iterables.getFirst(rev.getBranches(), null); if (branch!=null) { env.put(GIT_BRANCH, branch.getName()); String prevCommit = getLastBuiltCommitOfBranch(build, branch); if (prevCommit != null) { env.put(GIT_PREVIOUS_COMMIT, prevCommit); } } env.put(GIT_COMMIT, fixEmpty(rev.getSha1String())); } if(userRemoteConfigs.size()==1){ env.put("GIT_URL", userRemoteConfigs.get(0).getUrl()); }else{ int count=1; for(UserRemoteConfig config:userRemoteConfigs) { env.put("GIT_URL_"+count, config.getUrl()); count++; } } getDescriptor().populateEnvironmentVariables(env); for (GitSCMExtension ext : extensions) { ext.populateEnvironmentVariables(this, env); } } private String getLastBuiltCommitOfBranch(AbstractBuild<?, ?> build, Branch branch) { String prevCommit = null; if (build.getPreviousBuiltBuild() != null) { final Build lastBuildOfBranch = fixNull(getBuildData(build.getPreviousBuiltBuild())).getLastBuildOfBranch(branch.getName()); if (lastBuildOfBranch != null) { Revision previousRev = lastBuildOfBranch.getRevision(); if (previousRev != null) { prevCommit = previousRev.getSha1String(); } } } return prevCommit; } @Override public ChangeLogParser createChangeLogParser() { return new GitChangeLogParser(getExtensions().get(AuthorInChangelog.class)!=null); } @Extension public static final class DescriptorImpl extends SCMDescriptor<GitSCM> { private String gitExe; private String globalConfigName; private String globalConfigEmail; private boolean createAccountBasedOnEmail; // private GitClientType defaultClientType = GitClientType.GITCLI; public DescriptorImpl() { super(GitSCM.class, GitRepositoryBrowser.class); load(); } public String getDisplayName() { return "Git"; } public List<BuildChooserDescriptor> getBuildChooserDescriptors() { return BuildChooser.all(); } public List<GitSCMExtensionDescriptor> getExtensionDescriptors() { return GitSCMExtensionDescriptor.all(); } public boolean showGitToolOptions() { return Jenkins.getInstance().getDescriptorByType(GitTool.DescriptorImpl.class).getInstallations().length>1; } /** * Lists available toolinstallations. * @return list of available git tools */ public List<GitTool> getGitTools() { GitTool[] gitToolInstallations = Hudson.getInstance().getDescriptorByType(GitTool.DescriptorImpl.class).getInstallations(); return Arrays.asList(gitToolInstallations); } public ListBoxModel doFillGitToolItems() { ListBoxModel r = new ListBoxModel(); for (GitTool git : getGitTools()) { r.add(git.getName()); } return r; } /** * Path to git executable. * @deprecated * @see GitTool */ @Deprecated public String getGitExe() { return gitExe; } /** * Global setting to be used in call to "git config user.name". 
*/ public String getGlobalConfigName() { return fixEmptyAndTrim(globalConfigName); } public void setGlobalConfigName(String globalConfigName) { this.globalConfigName = globalConfigName; } /** * Global setting to be used in call to "git config user.email". */ public String getGlobalConfigEmail() { return fixEmptyAndTrim(globalConfigEmail); } public void setGlobalConfigEmail(String globalConfigEmail) { this.globalConfigEmail = globalConfigEmail; } public boolean isCreateAccountBasedOnEmail() { return createAccountBasedOnEmail; } public void setCreateAccountBasedOnEmail(boolean createAccountBasedOnEmail) { this.createAccountBasedOnEmail = createAccountBasedOnEmail; } /** * Old configuration of git executable - exposed so that we can * migrate this setting to GitTool without deprecation warnings. */ public String getOldGitExe() { return gitExe; } public SCM newInstance(StaplerRequest req, JSONObject formData) throws FormException { return super.newInstance(req, formData); /* List<RemoteConfig> remoteRepositories; try { remoteRepositories = createRepositoryConfigurations(req.getParameterValues("git.repo.url"), req.getParameterValues("git.repo.name"), req.getParameterValues("git.repo.refspec")); } catch (IOException e1) { throw new GitException("Error creating repositories", e1); } List<BranchSpec> branches = createBranches(req.getParameterValues("git.branch")); // Make up a repo config from the request parameters PreBuildMergeOptions mergeOptions = createMergeOptions(req.getParameter("git.doMerge"), req.getParameter("git.mergeRemote"), req.getParameter("git.mergeTarget"), remoteRepositories); String[] urls = req.getParameterValues("git.repo.url"); String[] names = req.getParameterValues("git.repo.name"); Collection<SubmoduleConfig> submoduleCfg = new ArrayList<SubmoduleConfig>(); final GitRepositoryBrowser gitBrowser = getBrowserFromRequest(req, formData); String gitTool = req.getParameter("git.gitTool"); return new GitSCM( remoteRepositories, branches, mergeOptions, req.getParameter("git.generate") != null, submoduleCfg, req.getParameter("git.clean") != null, req.getParameter("git.wipeOutWorkspace") != null, req.bindJSON(BuildChooser.class,formData.getJSONObject("buildChooser")), gitBrowser, gitTool, req.getParameter("git.authorOrCommitter") != null, req.getParameter("git.relativeTargetDir"), req.getParameter("git.excludedRegions"), req.getParameter("git.excludedUsers"), req.getParameter("git.localBranch"), req.getParameter("git.recursiveSubmodules") != null, req.getParameter("git.pruneBranches") != null, req.getParameter("git.gitConfigName"), req.getParameter("git.gitConfigEmail"), req.getParameter("git.skipTag") != null); */ } public static List<RemoteConfig> createRepositoryConfigurations(String[] urls, String[] repoNames, String[] refs) throws IOException { List<RemoteConfig> remoteRepositories; Config repoConfig = new Config(); // Make up a repo config from the request parameters String[] names = repoNames; names = GitUtils.fixupNames(names, urls); if (names != null) { for (int i = 0; i < names.length; i++) { String name = names[i]; name = name.replace(' ', '_'); if (isBlank(refs[i])) { /** * Fill in the environment variables for launching git */ public void populateEnvironmentVariables(Map<String,String> env) { String name = getGlobalConfigName(); if (name!=null) { env.put("GIT_COMMITTER_NAME", name); env.put("GIT_AUTHOR_NAME", name); } String email = getGlobalConfigEmail(); if (email!=null) { env.put("GIT_COMMITTER_EMAIL", email); env.put("GIT_AUTHOR_EMAIL", email); } } // public 
GitClientType getDefaultClientType() { // return defaultClientType; // public void setDefaultClientType(String defaultClientType) { // this.defaultClientType = GitClientType.valueOf(defaultClientType); } private static final long serialVersionUID = 1L; public boolean isDoGenerateSubmoduleConfigurations() { return this.doGenerateSubmoduleConfigurations; } @Exported public List<BranchSpec> getBranches() { return branches; } /** * Use {@link PreBuildMerge}. */ @Exported @Deprecated public PreBuildMergeOptions getMergeOptions() throws FormException { return DescriptorImpl.createMergeOptions(getUserMergeOptions(), remoteRepositories); } private boolean isRelevantBuildData(BuildData bd) { for(UserRemoteConfig c : getUserRemoteConfigs()) { if(bd.hasBeenReferenced(c.getUrl())) { return true; } } return false; } /** * @deprecated */ public BuildData getBuildData(Run build, boolean clone) { return clone ? copyBuildData(build) : getBuildData(build); } /** * Like {@link #getBuildData(Run)}, but copy the data into a new object, * which is used as the first step for updating the data for the next build. */ public BuildData copyBuildData(Run build) { BuildData base = getBuildData(build); if (base==null) return new BuildData(getScmName(), getUserRemoteConfigs()); else return base.clone(); } /** * Find the build log (BuildData) recorded with the last build that completed. BuildData * may not be recorded if an exception occurs in the plugin logic. * * @param build * @return the last recorded build data */ public @CheckForNull BuildData getBuildData(Run build) { BuildData buildData = null; while (build != null) { List<BuildData> buildDataList = build.getActions(BuildData.class); for (BuildData bd : buildDataList) { if (bd != null && isRelevantBuildData(bd)) { buildData = bd; break; } } if (buildData != null) { break; } build = build.getPreviousBuild(); } return buildData; } /** * Given the workspace, gets the working directory, which will be the workspace * if no relative target dir is specified. Otherwise, it'll be "workspace/relativeTargetDir". * * @param workspace * @return working directory or null if workspace is null */ protected FilePath workingDirectory(AbstractProject<?,?> context, FilePath workspace, EnvVars environment, TaskListener listener) throws IOException, InterruptedException { // JENKINS-10880: workspace can be null if (workspace == null) { return null; } for (GitSCMExtension ext : extensions) { FilePath r = ext.getWorkingDirectory(this, context, workspace, environment, listener); if (r!=null) return r; } return workspace; } /** * Given a Revision "r", check whether the list of revisions "COMMITS_WE_HAVE_BUILT..r" are to be entirely excluded given the exclusion rules * * @param git GitClient object * @param r Revision object * @param listener * @return true if any exclusion files are matched, false otherwise. 
*/ private boolean isRevExcluded(GitClient git, Revision r, TaskListener listener, BuildData buildData) throws IOException, InterruptedException { try { List<String> revShow; if (buildData != null && buildData.lastBuild != null) { revShow = git.showRevision(buildData.lastBuild.revision.getSha1(), r.getSha1()); } else { revShow = git.showRevision(r.getSha1()); } revShow.add("commit "); // sentinel value int start=0, idx=0; for (String line : revShow) { if (line.startsWith("commit ") && idx!=0) { GitChangeSet change = new GitChangeSet(revShow.subList(start,idx), getExtensions().get(AuthorInChangelog.class)!=null); Boolean excludeThisCommit=null; for (GitSCMExtension ext : extensions) { excludeThisCommit = ext.isRevExcluded(this, git, change, listener, buildData); if (excludeThisCommit!=null) break; } if (excludeThisCommit==null || !excludeThisCommit) return false; // this sequence of commits have one commit that we want to build start = idx; } idx++; } assert start==revShow.size()-1; // every commit got excluded return true; } catch (GitException e) { e.printStackTrace(listener.error("Failed to determine if we want to exclude " + r.getSha1String())); return false; // for historical reason this is not considered a fatal error. } } @Initializer(after=PLUGINS_STARTED) public static void onLoaded() { DescriptorImpl desc = Jenkins.getInstance().getDescriptorByType(DescriptorImpl.class); if (desc.getOldGitExe() != null) { String exe = desc.getOldGitExe(); String defaultGit = GitTool.getDefaultInstallation().getGitExe(); if (defaultGit.equals(exe)) { return; } System.err.println("[WARNING] you're using deprecated gitexe attribute to configure git plugin. Use Git installations"); } } @Initializer(before=JOB_LOADED) public static void configureXtream() { Run.XSTREAM.registerConverter(new ObjectIdConverter()); Items.XSTREAM.registerConverter(new RemoteConfigConverter(Items.XSTREAM)); Items.XSTREAM.alias("org.spearce.jgit.transport.RemoteConfig", RemoteConfig.class); } private static final Logger LOGGER = Logger.getLogger(GitSCM.class.getName()); /** * Set to true to enable more logging to build's {@link TaskListener}. * Used by various classes in this package. */ public static boolean VERBOSE = Boolean.getBoolean(GitSCM.class.getName() + ".verbose"); /** * To avoid pointlessly large changelog, we'll limit the number of changes up to this. */ public static final int MAX_CHANGELOG = Integer.getInteger(GitSCM.class.getName()+".maxChangelog",1024); }
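/*
 * Illustrative sketch only (not part of the plugin sources above). The two knobs declared at the
 * end of GitSCM are wired to system properties keyed on GitSCM.class.getName(): MAX_CHANGELOG is
 * final and is normally tuned with a -D JVM option (using the ".maxChangelog" suffix from the
 * field initializer) before the class is initialized, while VERBOSE is a public non-final static
 * field that test or glue code may simply flip at runtime, as below.
 */
class GitSCMVerbositySketch {
    static void enableVerboseGitLogging() {
        GitSCM.VERBOSE = true; // extra progress logging to the build's TaskListener
    }
}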
package info.tehnut.xtones; import com.google.common.base.Preconditions; import net.minecraft.block.Block; import net.minecraft.block.material.Material; import net.minecraft.creativetab.CreativeTabs; import net.minecraft.item.Item; import net.minecraft.item.ItemBlock; import net.minecraft.item.ItemStack; import net.minecraftforge.event.RegistryEvent; import net.minecraftforge.fml.common.Loader; import net.minecraftforge.fml.common.Mod; import net.minecraftforge.fml.common.Mod.EventBusSubscriber; import net.minecraftforge.fml.common.Mod.EventHandler; import net.minecraftforge.fml.common.event.FMLInitializationEvent; import net.minecraftforge.fml.common.eventhandler.SubscribeEvent; import net.minecraftforge.registries.IForgeRegistry; import org.checkerframework.checker.nullness.qual.MonotonicNonNull; import info.tehnut.xtones.block.FlatLampBlock; import info.tehnut.xtones.block.XtoneBlock; import info.tehnut.xtones.item.XtoneBlockItem; import info.tehnut.xtones.network.XtonesNetwork; import info.tehnut.xtones.support.ChiselSupport; import info.tehnut.xtones.support.ChiselsAndBitsSupport; import java.util.EnumMap; import java.util.Map; import java.util.stream.Stream; @Mod(modid = Xtones.ID, name = Xtones.NAME, version = Xtones.VERSION) @EventBusSubscriber(modid = Xtones.ID) public final class Xtones { public static final String ID = "xtones"; public static final String NAME = "Xtones"; public static final String VERSION = "@VERSION@"; private static final CreativeTabs CREATIVE_TAB = new CreativeTabs(ID) { @Override public ItemStack createIcon() { return new ItemStack(baseItem); } @Override public boolean hasSearchBar() { return true; } }.setBackgroundImageName("item_search.png"); private static final Map<Tone, Block> BLOCKS = new EnumMap<Tone, Block>(Tone.class); private static final Map<Tone, Item> ITEMS = new EnumMap<Tone, Item>(Tone.class); private static @MonotonicNonNull Block baseBlock; private static @MonotonicNonNull Block lampBlock; private static @MonotonicNonNull Item baseItem; private static @MonotonicNonNull Item lampItem; public static Stream<Block> blocks() { Preconditions.checkState(!BLOCKS.isEmpty()); return BLOCKS.values().stream(); } public static Stream<Item> items() { Preconditions.checkState(!ITEMS.isEmpty()); return ITEMS.values().stream(); } public static Block block(final Tone tone) { Preconditions.checkState(!BLOCKS.isEmpty()); return BLOCKS.get(tone); } public static Item item(final Tone tone) { Preconditions.checkState(!ITEMS.isEmpty()); return ITEMS.get(tone); } public static Block baseBlock() { Preconditions.checkState(baseBlock != null); return baseBlock; } public static Block lampBlock() { Preconditions.checkState(lampBlock != null); return lampBlock; } public static Item baseItem() { Preconditions.checkState(baseItem != null); return baseItem; } public static Item lampItem() { Preconditions.checkState(lampItem != null); return lampItem; } @EventHandler static void init(final FMLInitializationEvent event) { XtonesNetwork.init(); if (Loader.isModLoaded("chisel")) { ChiselSupport.init(); } if (Loader.isModLoaded("chiselsandbits")) { ChiselsAndBitsSupport.init(); } } @SubscribeEvent static void registerBlocks(final RegistryEvent.Register<Block> event) { final IForgeRegistry<Block> registry = event.getRegistry(); registry.register(baseBlock = new Block(Material.ROCK) .setRegistryName(ID, "base") .setTranslationKey(ID + ".base") .setCreativeTab(CREATIVE_TAB) .setResistance(3.0F) .setHardness(3.0F)); registry.register(lampBlock = new FlatLampBlock() 
.setRegistryName(ID, "lamp_flat") .setTranslationKey(ID + ".lamp_flat") .setCreativeTab(CREATIVE_TAB) .setHardness(0.5F)); for (final Tone tone : Tone.values()) { final Block block = new XtoneBlock(tone) .setRegistryName(ID, tone.toString()) .setTranslationKey(ID + '.' + tone) .setCreativeTab(CREATIVE_TAB) .setResistance(3.0F) .setHardness(3.0F); registry.register(block); BLOCKS.put(tone, block); } } @SubscribeEvent static void registerItems(final RegistryEvent.Register<Item> event) { final IForgeRegistry<Item> registry = event.getRegistry(); registry.register(baseItem = new ItemBlock(baseBlock()).setRegistryName(ID, "base")); registry.register(lampItem = new ItemBlock(lampBlock()).setRegistryName(ID, "lamp_flat")); for (final Tone tone : Tone.values()) { final Item item = new XtoneBlockItem(block(tone)).setRegistryName(ID, tone.toString()); registry.register(item); ITEMS.put(tone, item); } } }
package innovimax.mixthem; import innovimax.mixthem.arguments.*; import innovimax.mixthem.exceptions.*; import innovimax.mixthem.interfaces.*; import innovimax.mixthem.io.*; import java.io.File; import java.io.IOException; import java.io.OutputStream; /** * <p>Mix files together using variety of rules.</p> * <p>Here are the rules:</p> * <ul> * <li> 1: will output file1</li> * <li> 2: will output file2</li> * <li> +: will output file1+file2</li> * <li> alt-line: will output one line of each starting with first line of file1</li> * <li> alt-char: will output one char of each starting with first char of file1</li> * <li> random-alt-line[#seed]: will output one line of each code randomly based on a seed for reproducability</li> * <li> join[#col1][#col2]: will output merging of lines that have common occurrence</li> * </ul> * @author Innovimax * @version 1.0 */ public class MixThem { private final static int CHAR_BUFFER_SIZE = 1024; private final File file1, file2; private final OutputStream out; /** * Constructor * @param file1 The first file to be mixed * @param file2 The second file to be mixed * @param out The output stream to write mixing result */ public MixThem(File file1, File file2, OutputStream out) { this.file1 = file1; this.file2 = file2; this.out = out; } /** * Main entry. * @param args The command line arguments */ public static void main(String[] args) { run(args); } private static void run(String[] args) { try { Arguments mixArgs = Arguments.checkArguments(args); MixThem mixThem = new MixThem(mixArgs.getFirstFile(), mixArgs.getSecondFile(), System.out); mixThem.process(mixArgs.getRule()); } catch (ArgumentException e) { System.err.println("Files mixing can't be run due to following reason:"); System.err.println(e.getMessage()); printUsage(); } catch (MixException e) { System.err.println("Files mixing has been aborted due to following reason:"); System.err.println(e.getMessage()); } catch (Exception e) { System.err.println("An unexpected error occurs."); e.printStackTrace(); } } /** * Mix files together using rules. 
* @param rule The rule to be used for mixing * @throws MixException - If any error occurs during mixing * @see innovimax.mixthem.Rule */ public void process(Rule rule) throws MixException { try { switch(rule) { case _1: copyChar(this.file1, this.out); break; case _2: copyChar(this.file2, this.out); break; case _ADD: copyChar(this.file1, this.out); copyChar(this.file2, this.out); break; case _ALT_CHAR: copyAltChar(this.file1, this.file2, this.out); break; case _ALT_LINE: copySimpleAltLine(this.file1, this.file2, this.out); break; case _RANDOM_ALT_LINE: copyRandomAltLine(this.file1, this.file2, this.out); break; case _JOIN: //TODO // break; default: System.out.println("This rule has not been implemented yet."); } } catch (IOException e) { throw new MixException("Unexpected file error", e); } catch (MixException e) { throw e; } } // this one copies one file as beeing char private static void copyChar(File file, OutputStream out) throws MixException, IOException { char[] buffer = new char[CHAR_BUFFER_SIZE]; IInputChar reader = new DefaultCharReader(file); IOutputChar writer = new DefaultCharWriter(out); while (reader.hasCharacter()) { final int len = reader.nextCharacters(buffer, CHAR_BUFFER_SIZE); writer.writeCharacters(buffer, len); } reader.close(); writer.close(); } // this one copies two files alternativly char by char private static void copyAltChar(File file1, File file2, OutputStream out) throws MixException, IOException { IInputChar reader1 = new DefaultCharReader(file1); IInputChar reader2 = new DefaultCharReader(file2); IOutputChar writer = new DefaultCharWriter(out); boolean read1 = true; boolean read2 = true; boolean odd = true; while(read1 || read2) { if (read1) { if (reader1.hasCharacter()) { final int c = reader1.nextCharacter(); if (odd || !read2) { writer.writeCharacter(c); } } else { read1 = false; } } if (read2) { if (reader2.hasCharacter()) { final int c = reader2.nextCharacter(); if (!odd || !read1) { writer.writeCharacter(c); } } else { read2 = false; } } odd = !odd; } reader1.close(); reader2.close(); writer.close(); } // this one copies two files alternativly line by line private static void copySimpleAltLine(File file1, File file2, OutputStream out) throws MixException, IOException { copyAltLine(file1, file2, out, ReadType._SIMPLE); } // this one copies two files randomly alternativly line by line private static void copyRandomAltLine(File file1, File file2, OutputStream out) throws MixException, IOException { copyAltLine(file1, file2, out, ReadType._RANDOM); } private static void copyAltLine(File file1, File file2, OutputStream out, ReadType type) throws MixException, IOException { IInputLine reader1 = new DefaultLineReader(file1, true); IInputLine reader2 = new DefaultLineReader(file2, false); IOutputLine writer = new DefaultLineWriter(out); while (reader1.hasLine() || reader2.hasLine()) { final String line1 = reader1.nextLine(type, !reader2.hasLine()); if (line1 != null) { writer.writeLine(line1); } final String line2 = reader2.nextLine(type, !reader1.hasLine()); if (line2 != null) { writer.writeLine(line2); } } reader1.close(); reader2.close(); writer.close(); } public static void printUsage() { System.out.println(" "); System.out.println("Usage:"); System.out.println(" "); System.out.println(" mix-them file1 file2"); System.out.println(" (will generate any file based on file1 and file2)"); System.out.println(" "); System.out.println(" mix-them -[rule] file1 file2"); System.out.println(" (will generate a file based on the rule)"); System.out.println(" "); 
System.out.println(" Here are the list of rules"); for(Rule rule : Rule.values()) { System.out.print(" - " + rule.getName()); for(RuleParam param : rule.getParams()) { System.out.print(" ["+param.getName()+"]"); } System.out.println(": "+rule.getDescription()); } System.out.println(" "); } }
package joptsimple; import java.util.Collection; /** * Trains the option parser. This interface aids integration that disposes declaration of options but not actual * command-line parsing. * * Typical use is for another class to implement {@code OptionDeclarer} as a facade, forwarding calls to an * {@code OptionParser} instance. * * Note that although this is an interface, the returned values of calls are concrete jopt-simple classes. * * @author <a href="mailto:pholser@alumni.rice.edu">Paul Holser</a> * @see OptionParser * @since 4.6 */ public interface OptionDeclarer { OptionSpecBuilder accepts( String option ); OptionSpecBuilder accepts( String option, String description ); OptionSpecBuilder acceptsAll( Collection<String> options ); OptionSpecBuilder acceptsAll( Collection<String> options, String description ); /** * Gives an object that represents an access point for non-option arguments on a command line. * * @return an object that can be used to flesh out more detail about the non-option arguments */ NonOptionArgumentSpec<String> nonOptions(); /** * Gives an object that represents an access point for non-option arguments on a command line. * * @see #nonOptions() * @param description a string that describes the purpose of the non-option arguments. This is used when generating * help information about the parser. * @return an object that can be used to flesh out more detail about the non-option arguments */ NonOptionArgumentSpec<String> nonOptions( String description ); /** * Tells the parser whether or not to behave "POSIX-ly correct"-ly. * * @param setting {@code true} if the parser should behave "POSIX-ly correct"-ly */ void posixlyCorrect( boolean setting ); /** * <p>Tells the parser to treat unrecognized options as non-option arguments.</p> * * <p>If not called, then the parser raises an {@link OptionException} when it encounters an unrecognized * option.</p> */ void allowsUnrecognizedOptions(); /** * Tells the parser either to recognize or ignore <kbd>"-W"</kbd>-style long options. * * @param recognize {@code true} if the parser is to recognize the special style of long options */ void recognizeAlternativeLongOptions( boolean recognize ); }
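/*
 * Illustrative sketch (hypothetical class, not part of jopt-simple): the javadoc above suggests
 * implementing OptionDeclarer as a facade that forwards to an OptionParser. OptionParser exposes
 * matching methods for every member of this interface, so the delegation is purely mechanical.
 */
class ForwardingOptionDeclarer implements OptionDeclarer {
    private final OptionParser parser = new OptionParser();

    @Override public OptionSpecBuilder accepts( String option ) { return parser.accepts( option ); }
    @Override public OptionSpecBuilder accepts( String option, String description ) { return parser.accepts( option, description ); }
    @Override public OptionSpecBuilder acceptsAll( Collection<String> options ) { return parser.acceptsAll( options ); }
    @Override public OptionSpecBuilder acceptsAll( Collection<String> options, String description ) { return parser.acceptsAll( options, description ); }
    @Override public NonOptionArgumentSpec<String> nonOptions() { return parser.nonOptions(); }
    @Override public NonOptionArgumentSpec<String> nonOptions( String description ) { return parser.nonOptions( description ); }
    @Override public void posixlyCorrect( boolean setting ) { parser.posixlyCorrect( setting ); }
    @Override public void allowsUnrecognizedOptions() { parser.allowsUnrecognizedOptions(); }
    @Override public void recognizeAlternativeLongOptions( boolean recognize ) { parser.recognizeAlternativeLongOptions( recognize ); }

    /** Hands back the trained parser so callers can do the actual command-line parsing. */
    OptionParser toParser() { return parser; }
}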
package mho.qbar.objects; import mho.wheels.math.MathUtils; import mho.wheels.misc.BigDecimalUtils; import mho.wheels.misc.FloatUtils; import mho.wheels.misc.Readers; import mho.wheels.ordering.Ordering; import mho.wheels.structures.Pair; import mho.wheels.structures.Triple; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.math.BigDecimal; import java.math.BigInteger; import java.math.MathContext; import java.math.RoundingMode; import java.util.*; import static mho.wheels.iterables.IterableUtils.*; import static mho.wheels.ordering.Ordering.*; public final class Rational implements Comparable<Rational> { public static final @NotNull Rational ZERO = new Rational(BigInteger.ZERO, BigInteger.ONE); public static final @NotNull Rational ONE = new Rational(BigInteger.ONE, BigInteger.ONE); public static final @NotNull Rational SMALLEST_FLOAT = new Rational(BigInteger.ONE, BigInteger.ONE.shiftLeft(149)); public static final @NotNull Rational LARGEST_SUBNORMAL_FLOAT = new Rational(BigInteger.ONE.shiftLeft(23).subtract(BigInteger.ONE), BigInteger.ONE.shiftLeft(149)); public static final @NotNull Rational SMALLEST_NORMAL_FLOAT = new Rational(BigInteger.ONE, BigInteger.ONE.shiftLeft(126)); public static final @NotNull Rational LARGEST_FLOAT = new Rational(BigInteger.ONE.shiftLeft(128).subtract(BigInteger.ONE.shiftLeft(104)), BigInteger.ONE); public static final @NotNull Rational SMALLEST_DOUBLE = new Rational(BigInteger.ONE, BigInteger.ONE.shiftLeft(1074)); public static final @NotNull Rational LARGEST_SUBNORMAL_DOUBLE = new Rational(BigInteger.ONE.shiftLeft(52).subtract(BigInteger.ONE), BigInteger.ONE.shiftLeft(1074)); public static final @NotNull Rational SMALLEST_NORMAL_DOUBLE = new Rational(BigInteger.ONE, BigInteger.ONE.shiftLeft(1022)); public static final @NotNull Rational LARGEST_DOUBLE = new Rational(BigInteger.ONE.shiftLeft(1024).subtract(BigInteger.ONE.shiftLeft(971)), BigInteger.ONE); /** * {@code this} times {@code denominator} */ private final @NotNull BigInteger numerator; private final @NotNull BigInteger denominator; /** * Private constructor from {@link BigInteger}s; assumes arguments are valid * * <ul> * <li>{@code numerator} cannot be null.</li> * <li>{@code denominator} cannot be null or equal to 0.</li> * <li>{@code numerator} and {@code denominator} cannot have a positive common factor greater than 1.</li> * <li>Any {@code Rational} may be constructed with this constructor.</li> * </ul> * * @param numerator the numerator * @param denominator the denominator */ private Rational(@NotNull BigInteger numerator, @NotNull BigInteger denominator) { this.numerator = numerator; this.denominator = denominator; } /** * Returns this {@code Rational}'s numerator * * <ul> * <li>The result is non-null.</li> * </ul> * * @return the numerator */ public @NotNull BigInteger getNumerator() { return numerator; } /** * Returns this {@code Rational}'s denominator * * <ul> * <li>The result is positive.</li> * </ul> * * @return the denominator */ public @NotNull BigInteger getDenominator() { return denominator; } /** * Creates a {@code Rational} from {@code BigInteger}s. Throws an exception if {@code denominator} is zero. Reduces * arguments and negates denominator if necessary. 
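 * <p>A small usage sketch (values chosen purely for illustration; {@code BigInteger} is already imported in this file):</p>
 * <pre>{@code
 * Rational r = Rational.of(BigInteger.valueOf(2), BigInteger.valueOf(-4));
 * r.getNumerator();   // -1  (sign moved onto the numerator)
 * r.getDenominator(); //  2  (reduced by the gcd, 2)
 * }</pre>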
* * <ul> * <li>{@code numerator} cannot be null.</li> * <li>{@code denominator} cannot be null or equal to 0.</li> * <li>The result is non-null.</li> * </ul> * * @param numerator the numerator * @param denominator the denominator * @return the {@code Rational} corresponding to {@code numerator}/{@code denominator} * @throws java.lang.ArithmeticException if denominator is zero */ public static @NotNull Rational of(@NotNull BigInteger numerator, @NotNull BigInteger denominator) { if (denominator.equals(BigInteger.ZERO)) throw new ArithmeticException("division by zero"); if (numerator.equals(BigInteger.ZERO)) return ZERO; if (numerator.equals(denominator)) return ONE; BigInteger gcd = numerator.gcd(denominator); if (denominator.signum() < 0) gcd = gcd.negate(); return new Rational(numerator.divide(gcd), denominator.divide(gcd)); } public static @NotNull Rational of(long numerator, long denominator) { if (denominator == 0) throw new ArithmeticException("division by zero"); if (numerator == 0) return ZERO; if (numerator == denominator) return ONE; long gcd = MathUtils.gcd(numerator, denominator); if (denominator < 0) gcd = -gcd; return new Rational(BigInteger.valueOf(numerator / gcd), BigInteger.valueOf(denominator / gcd)); } public static @NotNull Rational of(int numerator, int denominator) { if (denominator == 0) throw new ArithmeticException("division by zero"); if (numerator == 0) return ZERO; if (numerator == denominator) return ONE; int gcd = MathUtils.gcd(numerator, denominator); if (denominator < 0) gcd = -gcd; return new Rational(BigInteger.valueOf(numerator / gcd), BigInteger.valueOf(denominator / gcd)); } /** * Creates a {@code Rational} from a {@code BigInteger}. * * <ul> * <li>{@code n} cannot be null.</li> * <li>The result is an integral {@code Rational}.</li> * </ul> * * @param n the {@code BigInteger} * @return the {@code Rational} corresponding to {@code n} */ public static @NotNull Rational of(@NotNull BigInteger n) { if (n.equals(BigInteger.ZERO)) return ZERO; if (n.equals(BigInteger.ONE)) return ONE; return new Rational(n, BigInteger.ONE); } public static @NotNull Rational of(long n) { if (n == 0) return ZERO; if (n == 1) return ONE; return new Rational(BigInteger.valueOf(n), BigInteger.ONE); } public static @NotNull Rational of(int n) { if (n == 0) return ZERO; if (n == 1) return ONE; return new Rational(BigInteger.valueOf(n), BigInteger.ONE); } public static @Nullable Rational of(float f) { if (f == 0.0) return ZERO; if (f == 1.0) return ONE; if (Float.isInfinite(f) || Float.isNaN(f)) return null; return of(new BigDecimal(Float.toString(f))); } public static @Nullable Rational of(double d) { if (d == 0.0) return ZERO; if (d == 1.0) return ONE; if (Double.isInfinite(d) || Double.isNaN(d)) return null; return of(BigDecimal.valueOf(d)); } public static @Nullable Rational ofExact(float f) { if (f == 0.0f) return ZERO; if (f == 1.0f) return ONE; if (Float.isInfinite(f) || Float.isNaN(f)) return null; int bits = Float.floatToIntBits(f); int exponent = bits >> 23 & ((1 << 8) - 1); int mantissa = bits & ((1 << 23) - 1); Rational rational; if (exponent == 0) { rational = of(mantissa).shiftRight(149); } else { rational = of(mantissa + (1 << 23), 1 << 23).shiftLeft(exponent - 127); } if (bits < 0) rational = rational.negate(); return rational; } public static @Nullable Rational ofExact(double d) { if (d == 0.0) return ZERO; if (d == 1.0) return ONE; if (Double.isInfinite(d) || Double.isNaN(d)) return null; long bits = Double.doubleToLongBits(d); int exponent = (int) (bits >> 52) & 
((1 << 11) - 1); long mantissa = bits & ((1L << 52) - 1); Rational rational; if (exponent == 0) { rational = of(BigInteger.valueOf(mantissa)).shiftRight(1074); } else { Rational significand = of(BigInteger.valueOf(mantissa)).shiftRight(52); rational = significand.add(ONE).shiftLeft(exponent - 1023); } if (bits < 0) rational = rational.negate(); return rational; } public static @NotNull Rational of(@NotNull BigDecimal d) { return divide(of(d.unscaledValue()), of(10).pow(d.scale())); } /** * Rounds {@code this} to an integer according to {@code roundingMode}; see {@link java.math.RoundingMode} for * details. * * <ul> * <li>{@code this} may be any {@code Rational}.</li> * <li>{@code roundingMode} may be any {@code RoundingMode}.</li> * <li>If {@code roundingMode} is {@link java.math.RoundingMode#UNNECESSARY}, {@code this} must be an * integer.</li> * <li>The result is not null.</li> * </ul> * * @param roundingMode determines the way in which {@code this} is rounded. Options are * {@link java.math.RoundingMode#UP}, {@link java.math.RoundingMode#DOWN}, {@link java.math.RoundingMode#CEILING}, * {@link java.math.RoundingMode#FLOOR}, {@link java.math.RoundingMode#HALF_UP}, * {@link java.math.RoundingMode#HALF_DOWN}, {@link java.math.RoundingMode#HALF_EVEN}, and * {@link java.math.RoundingMode#UNNECESSARY}. * @return {@code this}, rounded */ public @NotNull BigInteger bigIntegerValue(@NotNull RoundingMode roundingMode) { Ordering halfCompare = compare(fractionalPart(), of(1, 2)); if (signum() == -1) halfCompare = halfCompare.invert(); switch (roundingMode) { case UNNECESSARY: if (denominator.equals(BigInteger.ONE)) { return numerator; } else { throw new ArithmeticException("Rational not an integer. Use a different rounding mode"); } case FLOOR: return floor(); case CEILING: return ceiling(); case DOWN: return numerator.divide(denominator); case UP: BigInteger down = numerator.divide(denominator); if (numerator.mod(denominator).equals(BigInteger.ZERO)) { return down; } else { if (numerator.signum() == 1) { return down.add(BigInteger.ONE); } else { return down.subtract(BigInteger.ONE); } } case HALF_DOWN: if (halfCompare == GT) { return bigIntegerValue(RoundingMode.UP); } else { return bigIntegerValue(RoundingMode.DOWN); } case HALF_UP: if (halfCompare == LT) { return bigIntegerValue(RoundingMode.DOWN); } else { return bigIntegerValue(RoundingMode.UP); } case HALF_EVEN: if (halfCompare == LT) return bigIntegerValue(RoundingMode.DOWN); if (halfCompare == GT) return bigIntegerValue(RoundingMode.UP); BigInteger floor = floor(); return floor.testBit(0) ? floor.add(BigInteger.ONE) : floor; } return null; //never happens } /** * Rounds {@code this} to the nearest {@code BigInteger}, breaking ties with the half-even rule (see * {@link java.math.RoundingMode#HALF_EVEN}). * * <ul> * <li>{@code this} may be any {@code Rational}.</li> * <li>The result is not null.</li> * </ul> * * @return {@code this}, rounded */ public @NotNull BigInteger bigIntegerValue() { return bigIntegerValue(RoundingMode.HALF_EVEN); } /** * Converts {@code this} to a {@code BigInteger}. Throws an {@link java.lang.ArithmeticException} if {@code this} * is not integral. * * <ul> * <li>{@code this} must be an integer.</li> * <li>The result is not null.</li> * </ul> * * @return the {@code BigInteger} value of {@code this} */ public @NotNull BigInteger bigIntegerValueExact() { return bigIntegerValue(RoundingMode.UNNECESSARY); } /** * Converts {@code this} to a {@code byte}. 
Throws an {@link java.lang.ArithmeticException} if {@code this} is not * integral or outside of a {@code byte}'s range. * * <ul> * <li>{@code this} must be an integer within a {@code byte}'s range.</li> * <li>The result can be any {@code byte}.</li> * </ul> * * @return the {@code byte} value of {@code this} */ public byte byteValueExact() { return bigIntegerValueExact().byteValueExact(); } /** * Converts {@code this} to a {@code short}. Throws an {@link java.lang.ArithmeticException} if {@code this} is not * integral or outside of a {@code short}'s range. * * <ul> * <li>{@code this} must be an integer within a {@code short}'s range.</li> * <li>The result can be any {@code short}.</li> * </ul> * * @return the {@code short} value of {@code this} */ public short shortValueExact() { return bigIntegerValueExact().shortValueExact(); } /** * Converts {@code this} to an {@code int}. Throws an {@link java.lang.ArithmeticException} if {@code this} is not * integral or outside of an {@code int}'s range. * * <ul> * <li>{@code this} must be an integer within an {@code int}'s range.</li> * <li>The result can be any {@code int}.</li> * </ul> * * @return the {@code int} value of {@code this} */ public int intValueExact() { return bigIntegerValueExact().intValueExact(); } /** * Converts {@code this} to a {@code long}. Throws an {@link java.lang.ArithmeticException} if {@code this} is not * integral or outside of a {@code long}'s range. * * <ul> * <li>{@code this} must be an integer within a {@code long}'s range.</li> * <li>The result can be any {@code long}.</li> * </ul> * * @return the {@code long} value of {@code this} */ public long longValueExact() { return bigIntegerValueExact().longValueExact(); } /** * Determines whether {@code this} has a terminating decimal expansion (that is, whether the denominator has no * prime factors other than 2 or 5). * * <ul> * <li>{@code this} may be any {@code Rational}.</li> * <li>The result may be either {@code boolean}.</li> * </ul> * * @return whether {@code this} has a terminating decimal expansion */ public boolean hasTerminatingDecimalExpansion() { BigInteger denominatorResidue = denominator.shiftRight(denominator.getLowestSetBit()); BigInteger five = BigInteger.valueOf(5); while (denominatorResidue.mod(five).equals(BigInteger.ZERO)) { denominatorResidue = denominatorResidue.divide(five); } return denominatorResidue.equals(BigInteger.ONE); } /** * Rounds {@code this} to a {@link java.math.BigDecimal} with a specified rounding mode (see documentation for * {@code java.math.RoundingMode} for details) and with a specified precision (number of significant digits), or * to full precision if {@code precision} is 0. * * <ul> * <li>{@code this} may be any {@code Rational}.</li> * <li>{@code precision} must be non-negative.</li> * <li>{@code roundingMode} may be any {@code RoundingMode}.</li> * <li>If {@code precision} is 0, then {@code this} must be a {@code Rational} whose decimal expansion is * terminating; that is, its denominator must only have 2 or 5 as prime factors.</li> * <li>If {@code roundingMode} is {@code RoundingMode.UNNECESSARY}, then {@code precision} must be at least as * large as the number of digits in {@code this}'s decimal expansion.</li> * <li>The result is a non-null.</li> * </ul> * * @param precision the precision with which to round {@code this}. 0 indicates full precision. * @param roundingMode specifies the details of how to round {@code this}. 
* @return {@code this}, in {@code BigDecimal} form */ public @NotNull BigDecimal bigDecimalValue(int precision, @NotNull RoundingMode roundingMode) { MathContext context = new MathContext(precision, roundingMode); BigDecimal result = new BigDecimal(numerator).divide(new BigDecimal(denominator), context); if (precision != 0) { result = BigDecimalUtils.setPrecision(result, precision); } return result; }
public @NotNull BigDecimal bigDecimalValue(int precision) { return bigDecimalValue(precision, RoundingMode.HALF_EVEN); }
public @NotNull BigDecimal bigDecimalValueExact() { //noinspection BigDecimalMethodWithoutRoundingCalled
return new BigDecimal(numerator).divide(new BigDecimal(denominator)); }
public int binaryExponent() { if (this == ONE) return 0; if (this == ZERO || this.signum() != 1) throw new IllegalArgumentException("Rational must be positive"); Rational adjusted = this; int exponent = 0; if (lt(numerator, denominator)) { while (lt(adjusted.numerator, adjusted.denominator)) { adjusted = adjusted.shiftLeft(1); exponent--; } } else { while (ge(adjusted.numerator, adjusted.denominator)) { adjusted = adjusted.shiftRight(1); exponent++; } exponent--; } return exponent; }
/** * Every {@code Rational} has a <i>left-neighboring {@code float}</i>, or the largest {@code float} that is less * than or equal to the {@code Rational}; this {@code float} may be -Infinity. Likewise, every {@code Rational} * has a <i>right-neighboring {@code float}</i>: the smallest {@code float} greater than or equal to the * {@code Rational}. This float may be Infinity. If {@code this} is exactly equal to some {@code float}, the * left- and right-neighboring {@code float}s will both be equal to that {@code float} and to each other. This * method returns the pair made up of the left- and right-neighboring {@code float}s. If the left-neighboring * {@code float} is a zero, it is a positive zero; if the right-neighboring {@code float} is a zero, it is a * negative zero. The exception is when {@code this} is equal to zero; then both neighbors are positive zeroes. * * <ul> * <li>{@code this} may be any {@code Rational}.</li> * <li>The result is a pair of {@code float}s that are either equal, or the second is the next-largest * {@code float} after the first. Negative zero may not appear in the first slot of the pair, and positive zero * may only appear in the second slot if the first slot also contains a positive zero. Neither slot may contain a * {@code NaN}.</li> * </ul> * * @return The pair of left- and right-neighboring {@code float}s. */
private @NotNull Pair<Float, Float> floatRange() { if (this == ZERO) return new Pair<>(0f, 0f); if (numerator.signum() == -1) { Pair<Float, Float> negativeRange = negate().floatRange(); assert negativeRange.a != null; assert negativeRange.b != null; return new Pair<>(-negativeRange.b, -negativeRange.a); } int exponent = binaryExponent(); if (exponent > 127 || exponent == 127 && gt(this, LARGEST_FLOAT)) { return new Pair<>(Float.MAX_VALUE, Float.POSITIVE_INFINITY); } Rational fraction; int adjustedExponent; if (exponent < -126) { fraction = shiftLeft(149); adjustedExponent = 0; } else { fraction = subtract(shiftRight(exponent), ONE).shiftLeft(23); adjustedExponent = exponent + 127; } float loFloat = Float.intBitsToFloat((adjustedExponent << 23) + fraction.floor().intValueExact()); float hiFloat = fraction.denominator.equals(BigInteger.ONE) ?
loFloat : FloatUtils.successor(loFloat); return new Pair<>(loFloat, hiFloat); } /** * Every {@code Rational} has a <i>left-neighboring {@code double}</i>, or the largest {@code double} that is less * than or equal to the {@code Rational}; this {@code double} may be {@code -Infinity}. Likewise, every * {@code Rational} has a <i>right-neighboring {@code double}</i>: the smallest {@code double} greater than or * equal to the {@code Rational}. This double may be {@code Infinity}. If {@code this} is exactly equal to some * {@code double}, the left- and right-neighboring {@code double}s will both be equal to that {@code double} and * to each other. This method returns the pair made up of the left- and right-neighboring {@code double}s. If the * left-neighboring {@code double} is a zero, it is a positive zero; if the right-neighboring {@code double} is a * zero, it is a negative zero. The exception is when {@code this} is equal to zero; then both neighbors are * positive zeroes. * * <ul> * <li>{@code this} may be any {@code Rational}.</li> * <li>The result is a pair of {@code double}s that are either equal, or the second is the next-largest * {@code double} after the first. Negative zero may not appear in the first slot of the pair, and positive zero * may only appear in the second slot if the first slot also contains a positive zero. Neither slot may contain a * {@code NaN}.</li> * </ul> * * @return The pair of left- and right-neighboring {@code double}s. */ private @NotNull Pair<Double, Double> doubleRange() { if (this == ZERO) return new Pair<>(0.0, 0.0); if (numerator.signum() == -1) { Pair<Double, Double> negativeRange = negate().doubleRange(); assert negativeRange.a != null; assert negativeRange.b != null; return new Pair<>(-negativeRange.b, -negativeRange.a); } int exponent = binaryExponent(); if (exponent > 1023 || exponent == 1023 && gt(this, LARGEST_DOUBLE)) { return new Pair<>(Double.MAX_VALUE, Double.POSITIVE_INFINITY); } Rational fraction; int adjustedExponent; if (exponent < -1022) { fraction = shiftLeft(1074); adjustedExponent = 0; } else { fraction = subtract(shiftRight(exponent), ONE).shiftLeft(52); adjustedExponent = exponent + 1023; } double loDouble = Double.longBitsToDouble(((long) adjustedExponent << 52) + fraction.floor().longValue()); double hiDouble = fraction.denominator.equals(BigInteger.ONE) ? loDouble : FloatUtils.successor(loDouble); return new Pair<>(loDouble, hiDouble); } public float floatValue(@NotNull RoundingMode roundingMode) { Pair<Float, Float> floatRange = floatRange(); assert floatRange.a != null; assert floatRange.b != null; if (floatRange.a.equals(floatRange.b)) return floatRange.a; Rational loFloat = ofExact(floatRange.a); Rational hiFloat = ofExact(floatRange.b); if ((loFloat == null || hiFloat == null) && roundingMode == RoundingMode.UNNECESSARY) { throw new ArithmeticException("Rational not exactly equal to a float. 
Use a different rounding mode"); } if (loFloat == null) { if (roundingMode == RoundingMode.FLOOR || roundingMode == RoundingMode.UP || roundingMode == RoundingMode.HALF_UP || roundingMode == RoundingMode.HALF_EVEN) { return Float.NEGATIVE_INFINITY; } else { return -Float.MAX_VALUE; } } if (hiFloat == null) { if (roundingMode == RoundingMode.CEILING || roundingMode == RoundingMode.UP || roundingMode == RoundingMode.HALF_UP || roundingMode == RoundingMode.HALF_EVEN) { return Float.POSITIVE_INFINITY; } else { return Float.MAX_VALUE; } } Rational midway = loFloat.add(hiFloat).shiftRight(1); Ordering midwayCompare = compare(this, midway); switch (roundingMode) { case UNNECESSARY: throw new ArithmeticException("Rational not exactly equal to a float. Use a different rounding mode"); case FLOOR: return floatRange.a; case CEILING: return floatRange.b; case DOWN: return floatRange.a < 0 ? floatRange.b : floatRange.a; case UP: return floatRange.a < 0 ? floatRange.a : floatRange.b; case HALF_DOWN: if (midwayCompare == EQ) return signum() == 1 ? floatRange.a : floatRange.b; return midwayCompare == LT ? floatRange.a : floatRange.b; case HALF_UP: if (midwayCompare == EQ) return signum() == 1 ? floatRange.b : floatRange.a; return midwayCompare == LT ? floatRange.a : floatRange.b; case HALF_EVEN: if (midwayCompare == LT) return floatRange.a; if (midwayCompare == GT) return floatRange.b; return (Float.floatToIntBits(floatRange.a) & 1) == 0 ? floatRange.a : floatRange.b; } return 0; //never happens } public float floatValue() { return floatValue(RoundingMode.HALF_EVEN); } /** * Returns a {@code float} exactly equal to {@code this}. Throws an {@code ArithmeticException} if {@code this} is * not exactly equal to a {@code float}. * * <ul> * <li>{@code this} must be a {@code Rational} equal to a {@code float}.</li> * <li>The result is not {@code NaN}, infinite, or negative 0.</li> * </ul> * * @return {@code this}, in {@code float} form */ public float floatValueExact() { return floatValue(RoundingMode.UNNECESSARY); } public double doubleValue(@NotNull RoundingMode roundingMode) { Pair<Double, Double> doubleRange = doubleRange(); assert doubleRange.a != null; assert doubleRange.b != null; if (doubleRange.a.equals(doubleRange.b)) return doubleRange.a; Rational loDouble = ofExact(doubleRange.a); Rational hiDouble = ofExact(doubleRange.b); if ((loDouble == null || hiDouble == null) && roundingMode == RoundingMode.UNNECESSARY) { throw new ArithmeticException("Rational not exactly equal to a double. Use a different rounding mode"); } if (loDouble == null) { if (roundingMode == RoundingMode.FLOOR || roundingMode == RoundingMode.UP || roundingMode == RoundingMode.HALF_UP || roundingMode == RoundingMode.HALF_EVEN) { return Double.NEGATIVE_INFINITY; } else { return -Double.MAX_VALUE; } } if (hiDouble == null) { if (roundingMode == RoundingMode.CEILING || roundingMode == RoundingMode.UP || roundingMode == RoundingMode.HALF_UP || roundingMode == RoundingMode.HALF_EVEN) { return Double.POSITIVE_INFINITY; } else { return Double.MAX_VALUE; } } Rational midway = loDouble.add(hiDouble).shiftRight(1); Ordering midwayCompare = compare(this, midway); switch (roundingMode) { case UNNECESSARY: throw new ArithmeticException("Rational not exactly equal to a double. Use a different rounding mode"); case FLOOR: return doubleRange.a; case CEILING: return doubleRange.b; case DOWN: return doubleRange.a < 0 ? doubleRange.b : doubleRange.a; case UP: return doubleRange.a < 0 ? 
doubleRange.a : doubleRange.b; case HALF_DOWN: if (midwayCompare == EQ) return signum() == 1 ? doubleRange.a : doubleRange.b; return midwayCompare == LT ? doubleRange.a : doubleRange.b; case HALF_UP: if (midwayCompare == EQ) return signum() == 1 ? doubleRange.b : doubleRange.a; return midwayCompare == LT ? doubleRange.a : doubleRange.b; case HALF_EVEN: if (midwayCompare == LT) return doubleRange.a; if (midwayCompare == GT) return doubleRange.b; return (Double.doubleToLongBits(doubleRange.a) & 1) == 0 ? doubleRange.a : doubleRange.b; } return 0; //never happens } public double doubleValue() { return doubleValue(RoundingMode.HALF_EVEN); } /** * Returns a {@code double} exactly equal to {@code this}. Throws an {@code ArithmeticException} if {@code this} is * not exactly equal to a {@code double}. * * <ul> * <li>{@code this} must be a {@code Rational} equal to a {@code double}.</li> * <li>The result is not {@code NaN}, infinite, or negative 0.</li> * </ul> * * @return {@code this}, in {@code double} form */ public double doubleValueExact() { return doubleValue(RoundingMode.UNNECESSARY); } public @NotNull Rational negate() { if (this == ZERO) return ZERO; BigInteger negativeNumerator = numerator.negate(); if (negativeNumerator.equals(denominator)) return ONE; return new Rational(negativeNumerator, denominator); } /** * Returns the multiplicative inverse of {@code this}. * * <ul> * <li>{@code this} may be any non-zero {@code Rational}.</li> * <li>The result is a non-zero {@code Rational}.</li> * </ul> * * @return 1/{@code this}. */ public @NotNull Rational invert() { if (this == ZERO) throw new ArithmeticException("division by zero"); if (equals(ONE)) return ONE; if (numerator.signum() == -1) { return new Rational(denominator.negate(), numerator.negate()); } else { return new Rational(denominator, numerator); } } /** * Returns the absolute value of {@code this}. * * <ul> * <li>{@code this} may be any {@code Rational.}</li> * <li>The result is a non-negative {@code Rational}.</li> * </ul> * * @return |{@code this}|. */ public @NotNull Rational abs() { return numerator.signum() == -1 ? negate() : this; } public int signum() { return numerator.signum(); } /** * Returns the sum of {@code this} and {@code that}. 
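 * <p>A small usage sketch (operands chosen purely for illustration):</p>
 * <pre>{@code
 * Rational sum = Rational.of(1, 2).add(Rational.of(1, 3)); // 5/6
 * }</pre>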
* * <ul> * <li>{@code this} may be any {@code Rational}.</li> * <li>{@code that} cannot be null.</li> * <li>The result is not null.</li> * </ul> * * @param that the {@code Rational} added to {@code this} * @return {@code this}+{@code that} */ public @NotNull Rational add(@NotNull Rational that) { if (this == ZERO) return that; if (that == ZERO) return this; BigInteger d1 = denominator.gcd(that.denominator); if (d1.equals(BigInteger.ONE)) { BigInteger sn = numerator.multiply(that.denominator).add(denominator.multiply(that.numerator)); if (sn.equals(BigInteger.ZERO)) return ZERO; BigInteger sd = denominator.multiply(that.denominator); if (sn.equals(sd)) return ONE; return new Rational(sn, sd); } else { BigInteger t = numerator.multiply(that.denominator.divide(d1)) .add(that.numerator.multiply(denominator.divide(d1))); if (t.equals(BigInteger.ZERO)) return ZERO; BigInteger d2 = t.gcd(d1); BigInteger sn = t.divide(d2); BigInteger sd = denominator.divide(d1).multiply(that.denominator.divide(d2)); if (sn.equals(sd)) return ONE; return new Rational(sn, sd); } } public static @NotNull Rational subtract(@NotNull Rational a, @NotNull Rational b) { return a.add(b.negate()); } public @NotNull Rational multiply(@NotNull Rational that) { if (this == ZERO || that == ZERO) return ZERO; if (this == ONE) return that; if (that == ONE) return this; return of(numerator.multiply(that.getNumerator()), denominator.multiply(that.getDenominator())); } public @NotNull Rational multiply(@NotNull BigInteger that) { if (this == ZERO || that.equals(BigInteger.ZERO)) return ZERO; if (numerator.equals(BigInteger.ONE) && denominator.equals(that) || numerator.equals(BigInteger.ONE.negate()) && denominator.equals(that.negate())) return ONE; BigInteger g = denominator.gcd(that); return new Rational(numerator.multiply(that.divide(g)), denominator.divide(g)); } public @NotNull Rational multiply(int that) { if (this == ZERO || that == 0) return ZERO; if (numerator.equals(BigInteger.ONE) && denominator.equals(BigInteger.valueOf(that))) return ONE; if (numerator.equals(BigInteger.ONE.negate()) && denominator.equals(BigInteger.valueOf(that).negate())) return ONE; BigInteger g = denominator.gcd(BigInteger.valueOf(that)); return new Rational(numerator.multiply(BigInteger.valueOf(that).divide(g)), denominator.divide(g)); } /** * Returns the quotient of {@code a} and {@code b}. * * <ul> * <li>{@code a} cannot be null.</li> * <li>{@code b} cannot be null or zero.</li> * <li>The result is not null.</li> * </ul> * * @param a the first {@code Rational} * @param b the second {@code Rational} * @return {@code a}/{@code b} */ public static @NotNull Rational divide(@NotNull Rational a, @NotNull Rational b) { if (b == ZERO) throw new ArithmeticException("division by zero"); return a.multiply(b.invert()); } /** * Returns the quotient of {@code this} and {@code that}. 
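 * <p>A small usage sketch (operands chosen purely for illustration; {@code BigInteger} is already imported in this file):</p>
 * <pre>{@code
 * Rational q = Rational.of(3, 4).divide(BigInteger.valueOf(6)); // 1/8
 * }</pre>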
* * <ul> * <li>{@code this} may be any {@code Rational}.</li> * <li>{@code that} cannot be null or zero.</li> * <li>The result is not null.</li> * </ul> * * @param that the divisor * @return {@code this}/{@code that} */ public @NotNull Rational divide(@NotNull BigInteger that) { if (that.equals(BigInteger.ZERO)) throw new ArithmeticException("division by zero"); if (this == ZERO) return ZERO; if (denominator.equals(BigInteger.ONE) && numerator.equals(that)) return ONE; BigInteger g = numerator.gcd(that); if (that.signum() == -1) g = g.negate(); return new Rational(numerator.divide(g), denominator.multiply(that.divide(g))); } /** * Returns the quotient of {@code this} and {@code that}. * * <ul> * <li>{@code this} may be any {@code Rational}.</li> * <li>{@code that} cannot be zero.</li> * <li>The result is not null.</li> * </ul> * * @param that the divisor * @return {@code this}/{@code that} */ public @NotNull Rational divide(int that) { if (that == 0) throw new ArithmeticException("division by zero"); if (this == ZERO) return ZERO; if (denominator.equals(BigInteger.ONE) && numerator.equals(BigInteger.valueOf(that))) return ONE; BigInteger g = numerator.gcd(BigInteger.valueOf(that)); if (that < 0) g = g.negate(); return new Rational(numerator.divide(g), denominator.multiply(BigInteger.valueOf(that).divide(g))); } public static Rational sum(@NotNull Iterable<Rational> xs) { return foldl(p -> p.a.add(p.b), ZERO, xs); } public static Rational product(@NotNull Iterable<Rational> xs) { return foldl(p -> p.a.multiply(p.b), ONE, xs); } public static @NotNull Iterable<Rational> delta(@NotNull Iterable<Rational> xs) { if (isEmpty(xs)) throw new IllegalArgumentException("cannot get delta of empty Iterable"); if (head(xs) == null) throw new NullPointerException(); return adjacentPairsWith(p -> subtract(p.b, p.a), xs); } /** * The {@code n}th harmonic number, or the sum of the first {@code n} reciprocals. * * <ul> * <li>{@code n} must be positive.</li> * <li>The result is a harmonic number.</li> * </ul> * * @param n the index of a harmonic number * @return H<sub>{@code n}</sub> */ public static @NotNull Rational harmonicNumber(int n) { if (n < 1) throw new ArithmeticException("harmonic number must have positive index"); return sum(map(i -> of(i).invert(), range(1, n))); } /** * an {@code Iterable} that contains every harmonic number. Does not support removal. * * Length is infinite */ public static final @NotNull Iterable<Rational> HARMONIC_NUMBERS = tail(scanl(p -> { assert p.a != null; assert p.b != null; return p.a.add(p.b); }, ZERO, map(i -> of(i).invert(), range(1)))); /** * Returns {@code this} raised to the power of {@code p}. 0<sup>0</sup> yields 1. 
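 * <p>A small usage sketch (operands chosen purely for illustration):</p>
 * <pre>{@code
 * Rational p = Rational.of(2, 3).pow(-2); // 9/4
 * Rational q = Rational.ZERO.pow(0);      // 1
 * }</pre>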
* * <ul> * <li>{@code this} may be any {@code Rational}.</li> * <li>{@code p} may be any {@code int}.</li> * <li>If {@code p}{@literal <}0, {@code this} cannot be 0.</li> * <li>The result is not null.</li> * </ul> * * @param p the power * @return {@code this}<sup>{@code p}</sup> */ public @NotNull Rational pow(int p) { if (p == 0) return ONE; if (p == 1) return this; if (p < 0) { if (this == ZERO) throw new ArithmeticException("division by zero"); return invert().pow(-p); } if (this == ZERO || this == ONE) return this; if (this.equals(ONE.negate()) && p % 2 == 0) return ONE; BigInteger pNumerator = numerator.pow(p); BigInteger pDenominator = denominator.pow(p); if (pDenominator.signum() == -1) { pNumerator = pNumerator.negate(); pDenominator = pDenominator.negate(); } return new Rational(pNumerator, pDenominator); } public @NotNull BigInteger floor() { if (numerator.signum() < 0) { return negate().ceiling().negate(); } else { return numerator.divide(denominator); } } public @NotNull BigInteger ceiling() { if (numerator.signum() < 0) { return negate().floor().negate(); } else { if (numerator.mod(denominator).equals(BigInteger.ZERO)) { return numerator.divide(denominator); } else { return numerator.divide(denominator).add(BigInteger.ONE); } } } public @NotNull Rational fractionalPart() { if (denominator.equals(BigInteger.ONE)) return ZERO; return subtract(this, of(floor())); } //todo finish fixing JavaDoc /** * Rounds {@code this} a rational number that is an integer multiple of 1/{@code denominator} according to * {@code roundingMode}; see documentation for {@code java.math.RoundingMode} for details. * * <ul> * <li>{@code this} may be any {@code Rational}.</li> * <li>{@code denominator} must be positive.</li> * <li>If {@code roundingMode} is {@code UNNECESSARY}, {@code this}'s denominator must divide * {@code denominator}.</li> * <li>The result is not null.</li> * </ul> * * @param denominator the denominator which represents the precision that {@code this} is rounded to. * @param roundingMode determines the way in which {@code this} is rounded. Options are {@code UP}, * {@code DOWN}, {@code CEILING}, {@code FLOOR}, {@code HALF_UP}, {@code HALF_DOWN}, * {@code HALF_EVEN}, and {@code UNNECESSARY}. 
* @return {@code this}, rounded to an integer multiple of 1/{@code denominator} */ public @NotNull Rational roundToDenominator(@NotNull BigInteger denominator, @NotNull RoundingMode roundingMode) { if (denominator.signum() != 1) throw new ArithmeticException("must round to a positive denominator"); return of(multiply(denominator).bigIntegerValue(roundingMode)).divide(denominator); } public @NotNull Rational shiftLeft(int bits) { if (this == ZERO) return ZERO; if (bits == 0) return this; if (bits < 0) return shiftRight(-bits); int denominatorTwos = denominator.getLowestSetBit(); if (bits <= denominatorTwos) { BigInteger shifted = denominator.shiftRight(bits); if (numerator.equals(BigInteger.ONE) && shifted.equals(BigInteger.ONE)) return ONE; return new Rational(numerator, shifted); } else { BigInteger shiftedNumerator = numerator.shiftLeft(bits - denominatorTwos); BigInteger shiftedDenominator = denominator.shiftRight(denominatorTwos); if (shiftedNumerator.equals(BigInteger.ONE) && shiftedDenominator.equals(BigInteger.ONE)) return ONE; return new Rational(shiftedNumerator, shiftedDenominator); } } public @NotNull Rational shiftRight(int bits) { if (this == ZERO) return ZERO; if (bits == 0) return this; if (bits < 0) return shiftLeft(-bits); int numeratorTwos = numerator.getLowestSetBit(); if (bits <= numeratorTwos) { BigInteger shifted = numerator.shiftRight(bits); if (shifted.equals(BigInteger.ONE) && denominator.equals(BigInteger.ONE)) return ONE; return new Rational(shifted, denominator); } else { BigInteger shiftedNumerator = numerator.shiftRight(numeratorTwos); BigInteger shiftedDenominator = denominator.shiftLeft(bits - numeratorTwos); if (shiftedNumerator.equals(BigInteger.ONE) && shiftedDenominator.equals(BigInteger.ONE)) return ONE; return new Rational(shiftedNumerator, shiftedDenominator); } } /** * Finds the continued fraction of {@code this}. If we pretend that the result is an array called a of length n, * then {@code this}=a[0]+1/(a[1]+1/(a[2]+...+1/a[n-1]...)). Every rational number has two such representations; * this method returns the shortest one. * * <ul> * <li>{@code this} may be any {@code Rational}.</li> * <li>The result is non-null and non-empty. The first element may be any {@code BigInteger}; the remaining * elements, if any, are all positive. If the result has more than one element, the last element is greater than * 1.</li> * </ul> * * @return the continued-fraction-representation of {@code this} */ public @NotNull List<BigInteger> continuedFraction() { List<BigInteger> continuedFraction = new ArrayList<>(); Rational remainder = this; while (true) { BigInteger floor = remainder.floor(); continuedFraction.add(floor); remainder = subtract(remainder, of(floor)); if (remainder == ZERO) break; remainder = remainder.invert(); } return continuedFraction; } /** * Returns the {@code Rational} corresponding to a continued fraction. Every rational number has two continued- * fraction representations; either is accepted. * * <ul> * <li>{@code continuedFraction} must be non-null and non-empty. 
All elements but the first must be * positive.</li> * <li>The result is not null.</li> * </ul> * * @param continuedFraction a continued fraction * @return a[0]+1/(a[1]+1/(a[2]+...+1/a[n-1]...)) */ public static @NotNull Rational fromContinuedFraction(@NotNull List<BigInteger> continuedFraction) { Rational x = of(continuedFraction.get(continuedFraction.size() - 1)); for (int i = continuedFraction.size() - 2; i >= 0; i--) { if (i != 0 && continuedFraction.get(i).signum() != 1) throw new IllegalArgumentException("all continued fraction elements but the first must be positive"); x = x.invert().add(of(continuedFraction.get(i))); } return x; } /** * Returns the convergents, or rational approximations of {@code this} formed by truncating its continued fraction * at various points. The first element of the result is the floor of {@code this}, and the last element is * {@code this}. * * <ul> * <li>{@code this} may be any {@code Rational}.</li> * <li>The result is a non-null, non-empty list that consists of the convergents of its last element.</li> * </ul> * * @return the convergents of {@code this}. */ public @NotNull List<Rational> convergents() { List<Rational> approximations = new ArrayList<>(); List<BigInteger> continuedFraction = continuedFraction(); for (int i = 0; i < continuedFraction.size(); i++) { List<BigInteger> truncatedContinuedFraction = new ArrayList<>(); for (int j = 0; j <= i; j++) { truncatedContinuedFraction.add(continuedFraction.get(j)); } approximations.add(fromContinuedFraction(truncatedContinuedFraction)); } return approximations; } /** * Returns the positional expansion of (non-negative) {@code this} in a given base, in the form of a triple of * {@code BigInteger} lists. The first list contains the digits before the decimal point, the second list contains * the non-repeating part of the digits after the decimal point, and the third list contains the repeating digits. * The digits are given in the usual order: most-significant first. The first two lists may be empty, but the third * is always non-empty (if the digits terminate, the third list contains a single zero). * * <ul> * <li>{@code this} must be non-negative.</li> * <li>{@code base} must be greater than 1.</li> * <li>The elements of the result are all non-null. The elements of the elements are all non-negative. The first * element does not begin with a zero. The last element is non-empty. The second and third lists are minimal; that * is, the sequence (second)(third)(third)(third)... cannot be represented in a more compact way. The lists [1, 2] * and [3, 1, 2] are not minimal, because the sequence [1, 2, 3, 1, 2, 3, 1, 2, ...] can be represented by [] and * [1, 2, 3]. The lists [] and [1, 2, 1, 2] are not minimal either, because the sequence * [1, 2, 1, 2, 1, 2, ...] can be represented by [] and [1, 2].</li> * </ul> * * @param base the base of the positional expansion * @return a triple containing the digits before the decimal point, the non-repeating digits after the decimal * point, and the repeating digits.
*/ public @NotNull Triple<List<BigInteger>, List<BigInteger>, List<BigInteger>> positionalNotation(@NotNull BigInteger base) { if (signum() == -1) throw new IllegalArgumentException("this cannot be negative"); BigInteger floor = floor(); List<BigInteger> beforeDecimal = toList(MathUtils.digits(base, floor)); Rational fractionalPart = subtract(this, of(floor)); BigInteger numerator = fractionalPart.numerator; BigInteger denominator = fractionalPart.denominator; BigInteger remainder = numerator.multiply(base); int index = 0; Integer repeatingIndex; List<BigInteger> digits = new ArrayList<>(); Map<BigInteger, Integer> remainders = new HashMap<>(); while (true) { remainders.put(remainder, index); BigInteger digit = remainder.divide(denominator); digits.add(digit); remainder = remainder.subtract(denominator.multiply(digit)).multiply(base); repeatingIndex = remainders.get(remainder); if (repeatingIndex != null) break; index++; } List<BigInteger> nonrepeating = new ArrayList<>(); List<BigInteger> repeating = new ArrayList<>(); for (int i = 0; i < repeatingIndex; i++) { nonrepeating.add(digits.get(i)); } for (int i = repeatingIndex; i < digits.size(); i++) { repeating.add(digits.get(i)); } return new Triple<>(beforeDecimal, nonrepeating, repeating); } /** * Creates a {@code Rational} from a base expansion. * * <ul> * <li>{@code base} must be greater than 1.</li> * <li>{@code beforeDecimalPoint} must only contain elements greater than or equal to zero and less than * {@code base}.</li> * <li>{@code nonRepeating} must only contain elements greater than or equal to zero and less than * {@code base}.</li> * <li>{@code repeating} must only contain elements greater than or equal to zero and less than * {@code base}. It must also be non-empty; to input a terminating expansion, use one (or more) zeros.</li> * </ul> * * @param base the base of the positional expansion * @param beforeDecimalPoint the digits before the decimal point * @param nonRepeating the non-repeating portion of the digits after the decimal point * @param repeating the repeating portion of the digits after the decimal point * @return (beforeDecimalPoint).(nonRepeating)(repeating)(repeating)(repeating)..._(base) */ public static @NotNull Rational fromPositionalNotation( @NotNull BigInteger base, @NotNull List<BigInteger> beforeDecimalPoint, @NotNull List<BigInteger> nonRepeating, @NotNull List<BigInteger> repeating ) { BigInteger floor = MathUtils.fromDigits(base, beforeDecimalPoint); BigInteger nonRepeatingInteger = MathUtils.fromDigits(base, nonRepeating); BigInteger repeatingInteger = MathUtils.fromDigits(base, repeating); Rational nonRepeatingPart = of(nonRepeatingInteger, base.pow(nonRepeating.size())); Rational repeatingPart = of(repeatingInteger, base.pow(repeating.size()).subtract(BigInteger.ONE)) .divide(base.pow(nonRepeating.size())); return of(floor).add(nonRepeatingPart).add(repeatingPart); } /** * Returns the digits of (non-negative) {@code this} in a given base. The return value is a pair consisting of the * digits before the decimal point (in a list) and the digits after the decimal point (in a possibly-infinite * iterable). Trailing zeroes are not included. 
* * <ul> * <li>{@code this} must be non-negative.</li> * <li>{@code base} must be greater than 1.</li> * <li>The result is not null, and neither of its elements is null.</li> * </ul> * * @param base the base of the digits * @return a pair consisting of the digits before the decimal point and the digits after */ public @NotNull Pair<List<BigInteger>, Iterable<BigInteger>> digits(@NotNull BigInteger base) { Triple<List<BigInteger>, List<BigInteger>, List<BigInteger>> positionalNotation = positionalNotation(base); Iterable<BigInteger> afterDecimal; assert positionalNotation.c != null; if (positionalNotation.c.equals(Arrays.asList(BigInteger.ZERO))) { afterDecimal = positionalNotation.b; } else { assert positionalNotation.b != null; afterDecimal = concat(positionalNotation.b, cycle(positionalNotation.c)); } return new Pair<>(positionalNotation.a, afterDecimal); } /** * Determines whether {@code this} is equal to {@code that}. * * <ul> * <li>{@code this} may be any {@code Rational}.</li> * <li>{@code that} may be any {@code Object}.</li> * <li>The result may be either {@code boolean}.</li> * </ul> * * @param that The {@code Object} to be compared with {@code this} * @return {@code this}={@code that} */ @Override public boolean equals(Object that) { if (this == that) return true; if (that == null || Rational.class != that.getClass()) return false; Rational r = (Rational) that; return denominator.equals(r.denominator) && numerator.equals(r.numerator); } /** * Calculates the hash code of {@code this}. * * <ul> * <li>{@code this} may be any {@code Rational}.</li> * <li>(conjecture) The result may be any {@code int}.</li> * </ul> * * @return {@code this}'s hash code. */ @Override public int hashCode() { return 31 * numerator.hashCode() + denominator.hashCode(); } /** * Compares {@code this} to {@code that}, returning a negative number, zero, or a positive number as {@code this} * is less than, equal to, or greater than {@code that}. */ @Override public int compareTo(@NotNull Rational that) { return numerator.multiply(that.denominator).compareTo(that.numerator.multiply(denominator)); } /** * Attempts to read a {@code Rational} from a {@code String} written either as an integer or as {@code "a/b"}. * Returns an empty {@code Optional} if the string cannot be parsed or the denominator is zero. */ public static @NotNull Optional<Rational> read(@NotNull String s) { if (s.isEmpty()) return Optional.empty(); int slashIndex = s.indexOf("/"); if (slashIndex == -1) { Optional<BigInteger> n = Readers.readBigInteger(s); return n.map(Rational::of); } else { Optional<BigInteger> numerator = Readers.readBigInteger(s.substring(0, slashIndex)); if (!numerator.isPresent()) return Optional.empty(); Optional<BigInteger> denominator = Readers.readBigInteger(s.substring(slashIndex + 1)); if (!denominator.isPresent() || denominator.get().equals(BigInteger.ZERO)) return Optional.empty(); return Optional.of(of(numerator.get(), denominator.get())); } } /** * Creates a string representation of {@code this}. * * <ul> * <li>{@code this} may be any {@code Rational}.</li> * <li>The result is a string in one of two forms: {@code a.toString()} or {@code a.toString() + "/" + * b.toString()}, where {@code a} and {@code b} are some {@code BigInteger}s such that {@code b} is * positive and {@code a} and {@code b} have no positive common factors greater than 1.</li> * </ul> * * @return a string representation of {@code this} */ public @NotNull String toString() { if (denominator.equals(BigInteger.ONE)) { return numerator.toString(); } else { return numerator.toString() + "/" + denominator.toString(); } } }
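The continued-fraction and positional-notation methods above are easiest to follow with a concrete value. The following is a minimal usage sketch, not part of Rational itself: it assumes the static of(BigInteger) and of(BigInteger, BigInteger) factories behave as read(...) and fractionalPart() imply, and that the demo class sits in the same package as Rational.

import java.math.BigInteger;
import java.util.List;

// Hypothetical demo class (not part of the library); assumes it lives in Rational's package.
class RationalDemo {
    public static void main(String[] args) {
        // 415/93 = 4 + 1/(2 + 1/(6 + 1/7)), so the shortest continued fraction is [4, 2, 6, 7].
        Rational r = Rational.of(BigInteger.valueOf(415), BigInteger.valueOf(93));
        List<BigInteger> cf = r.continuedFraction();   // [4, 2, 6, 7]
        List<Rational> conv = r.convergents();         // [4, 9/2, 58/13, 415/93]
        System.out.println(cf + " -> " + conv);

        // 1/6 = 0.1666... in base 10: one non-repeating digit (1) followed by a repeating 6,
        // so positionalNotation(10) should yield roughly ([], [1], [6]).
        Rational sixth = Rational.of(BigInteger.ONE, BigInteger.valueOf(6));
        System.out.println(sixth.positionalNotation(BigInteger.TEN));
    }
}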
package mll.service; import java.io.BufferedReader; import java.util.Date; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.json.simple.JSONObject; import org.json.simple.parser.JSONParser; import mll.beans.Invite; import mll.dao.InviteDAO; public class InviteService { InviteDAO dao; public InviteService() { dao = new InviteDAO(); } /** * This method takes HTTP request and response objects as input and * first validates the invite request. If it is valid, the request is * dispatched to the proper service method based on the action type, * and the resulting JSON object is returned as the response. * @author Dhaval Patel * @version 1.0 * @since 2016-04-06 */ @SuppressWarnings("unchecked") public JSONObject handleInviteRequest(HttpServletRequest request, HttpServletResponse response) { JSONObject responseObject = new JSONObject(); Invite invite = populateInviteBeansFromRequest(request); String email = invite.getToken().getEmailId(); // boolean isValid = dao.checkEmailId(email); if(null != invite.getActiontype()) { if(invite.getActiontype().equalsIgnoreCase("generate")) { responseObject = generateInvite(invite); } else { responseObject = validateInvite(invite); } } else { responseObject.put("isGenerated", false); responseObject.put("isValid", false); responseObject.put("errorMessage", "Error while processing this invite request. Please try again with valid invite details."); } return responseObject; } /** * This method takes an Invite object as input, generates the * token and URL string, and sends the mail to the recipient. It * returns the response as a JSON object. * @author Dhaval Patel * @version 1.0 * @since 2016-04-06 */ @SuppressWarnings("unchecked") public JSONObject generateInvite(Invite invite) { try { invite = dao.generateInvite(invite); if(invite.getIsGenerated()) { MailService mailService = new MailService(); mailService.sendInvite(invite); } else { invite.setIsGenerated(false); invite.setMessage("Request does not contain valid data. Please submit with proper invite details."); } } catch(Exception e) { e.printStackTrace(); invite.setIsGenerated(false); invite.setMessage("Error while sending Invite. Please submit with proper invite details."); } JSONObject responseObject = new JSONObject(); responseObject.put("isGenerated", invite.getIsGenerated()); responseObject.put("message", invite.getMessage()); responseObject.put("URL", invite.getUrl()); return responseObject; } /** * This method takes an Invite object as input and validates the invite. * The response JSON object is built based on the validation result. * @author Dhaval Patel * @version 1.0 * @since 2016-04-06 */ @SuppressWarnings("unchecked") public JSONObject validateInvite(Invite invite) { try { invite = dao.validateInvite(invite); } catch(Exception e) { e.printStackTrace(); invite.setIsValid(false); invite.setMessage("Error while validating Invite. Please submit again."); } JSONObject responseObject = new JSONObject(); responseObject.put("isValid", invite.getIsValid()); responseObject.put("errorMessage", invite.getMessage()); return responseObject; } /** * This method takes the HTTP request, populates the form beans from the JSON * object, and creates the Invite object based on the action type.
* @author Dhaval Patel * @version 1.0 * @since 2016-04-06 */ public Invite populateInviteBeansFromRequest(HttpServletRequest request) { Invite invite = new Invite(); try { StringBuffer requestStr = new StringBuffer(); BufferedReader reader = request.getReader(); String line = null; while ((line = reader.readLine()) != null) { requestStr.append(line); } JSONParser parser = new JSONParser(); JSONObject tokenJsonObject = (JSONObject) parser.parse(requestStr.toString()); invite = populateInviteBeansFromRequest(invite, tokenJsonObject); } catch(Exception e) { e.printStackTrace(); // Error message will be set from the main method. } return invite; } public Invite populateInviteBeansFromRequest(Invite invite, JSONObject tokenJsonObject) { if(null != invite && null != tokenJsonObject) { invite.setActiontype((String) tokenJsonObject.get("actionType")); if(invite.getActiontype().equalsIgnoreCase("generate")) { invite.getToken().setEmailId((String) tokenJsonObject.get("email")); invite.getToken().setToken(""); invite.getToken().setInviteType((String) tokenJsonObject.get("inviteType")); invite.getToken().setMessageBody((String) tokenJsonObject.get("messageBody")); invite.getToken().setIssueDate(new Date()); invite.getToken().setIsUsed(false); invite.getToken().setUserId(((Long) tokenJsonObject.get("userId")).intValue()); } else { invite.getToken().setToken((String) tokenJsonObject.get("token")); invite.getToken().setInviteType((String) tokenJsonObject.get("inviteType")); } } return invite; } }
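For reference, the JSON body that handleInviteRequest(...) expects can be assembled directly with json-simple, and the populateInviteBeansFromRequest(Invite, JSONObject) overload can be exercised in isolation. The sketch below is illustrative only: the field values are made up, and it assumes that Invite's default constructor initializes its nested Token (as the service code above implies) and that constructing InviteService, and therefore InviteDAO, does not require a live backend.

import org.json.simple.JSONObject;
import mll.beans.Invite;
import mll.service.InviteService;

// Hypothetical demo class (not part of the application).
class InvitePayloadSketch {
    @SuppressWarnings("unchecked")
    public static void main(String[] args) {
        // A "generate" request body, mirroring the keys read in populateInviteBeansFromRequest.
        JSONObject body = new JSONObject();
        body.put("actionType", "generate");
        body.put("email", "alice@example.com");   // illustrative value
        body.put("inviteType", "standard");       // illustrative value
        body.put("messageBody", "Please join.");  // illustrative value
        body.put("userId", 42L);                  // read back via ((Long) ...).intValue()

        Invite invite = new InviteService().populateInviteBeansFromRequest(new Invite(), body);
        System.out.println(invite.getActiontype());          // "generate"
        System.out.println(invite.getToken().getEmailId());  // "alice@example.com"
    }
}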
package se.raddo.raddose3D.tests; import static org.testng.Assert.*; import org.testng.annotations.*; import se.raddo.raddose3D.Beam; import se.raddo.raddose3D.Crystal; import se.raddo.raddose3D.Experiment; import se.raddo.raddose3D.ExperimentDummy; import se.raddo.raddose3D.Output; import se.raddo.raddose3D.Wedge; import static org.mockito.Mockito.*; public class ExperimentTest { Crystal c = mock(Crystal.class); Wedge w = mock(Wedge.class); Beam b = mock(Beam.class); @Test public void testExperimentWithCrystalAndNullValues() { Experiment e = new Experiment(); OutputTestSubscriber testsubscriber = mock(OutputTestSubscriber.class); e.addObserver(testsubscriber); verify(testsubscriber, never()).publishBeam(null); testsubscriber.publishBeam(b); verify(testsubscriber, never()).publishBeam(null); } @Test public void testExperimentSimple() { Experiment e = new Experiment(); OutputTestSubscriber testsubscriber = new OutputTestSubscriber(); e.addObserver(testsubscriber); // No message sent yet assertNull(testsubscriber.lastseenobject); assertNull(testsubscriber.lastseencrystal); assertNull(testsubscriber.lastseenbeam); assertNull(testsubscriber.lastseenwedge); assertEquals(testsubscriber.seenobjects, 0); assertEquals(testsubscriber.seenclose, 0); e.setCrystal(c); // One object sent assertEquals(testsubscriber.lastseenobject, c); assertEquals(testsubscriber.lastseencrystal, c); assertNull(testsubscriber.lastseenbeam); assertNull(testsubscriber.lastseenwedge); assertEquals(testsubscriber.seenobjects, 1); assertEquals(testsubscriber.seenclose, 0); // Null values should be handled gracefully and ignored e.setBeam(null); e.setCrystal(null); e.exposeWedge(null); // One object sent assertEquals(testsubscriber.lastseenobject, c); assertEquals(testsubscriber.lastseencrystal, c); assertNull(testsubscriber.lastseenbeam); assertNull(testsubscriber.lastseenwedge); assertEquals(testsubscriber.seenobjects, 1); assertEquals(testsubscriber.seenclose, 0); e.close(); System.out.println("@Test - testExperimentSimple"); } @Test public void testExperimentComplex() { Experiment e = new ExperimentDummy(); OutputTestSubscriber testsubscriber = new OutputTestSubscriber(); e.addObserver(testsubscriber); // No message sent yet assertNull(testsubscriber.lastseenobject); assertNull(testsubscriber.lastseencrystal); assertNull(testsubscriber.lastseenbeam); assertNull(testsubscriber.lastseenwedge); assertEquals(testsubscriber.seenobjects, 0); assertEquals(testsubscriber.seenclose, 0); e.setCrystal(c); // One object sent assertEquals(testsubscriber.lastseenobject, c); assertEquals(testsubscriber.lastseencrystal, c); assertNull(testsubscriber.lastseenbeam); assertNull(testsubscriber.lastseenwedge); assertEquals(testsubscriber.seenobjects, 1); assertEquals(testsubscriber.seenclose, 0); // Subscribe second listener e.addObserver(testsubscriber); e.exposeWedge(w); // Three objects sent (1 + 2x1) assertEquals(testsubscriber.lastseenobject, w); assertEquals(testsubscriber.lastseencrystal, c); assertNull(testsubscriber.lastseenbeam); assertEquals(testsubscriber.lastseenwedge, w); assertEquals(testsubscriber.seenobjects, 3); assertEquals(testsubscriber.seenclose, 0); // Null values should be handled gracefully and ignored e.setBeam(null); e.setCrystal(null); e.exposeWedge(null); // Three objects sent (1 + 2x1) assertEquals(testsubscriber.lastseenobject, w); assertEquals(testsubscriber.lastseencrystal, c); assertNull(testsubscriber.lastseenbeam); assertEquals(testsubscriber.lastseenwedge, w); assertEquals(testsubscriber.seenobjects, 3); 
assertEquals(testsubscriber.seenclose, 0); // Subscribe third listener e.addObserver(testsubscriber); e.setBeam(b); // Six objects sent (1 + 2x1 + 3x1) assertEquals(testsubscriber.lastseenobject, b); assertEquals(testsubscriber.lastseencrystal, c); assertEquals(testsubscriber.lastseenbeam, b); assertEquals(testsubscriber.lastseenwedge, w); assertEquals(testsubscriber.seenobjects, 6); assertEquals(testsubscriber.seenclose, 0); e.close(); // Output flushed (3x 1) assertEquals(testsubscriber.lastseenobject, b); assertEquals(testsubscriber.lastseencrystal, c); assertEquals(testsubscriber.lastseenbeam, b); assertEquals(testsubscriber.lastseenwedge, w); assertEquals(testsubscriber.seenobjects, 6); assertEquals(testsubscriber.seenclose, 3); System.out.println("@Test - testExperimentComplex"); } private static class OutputTestSubscriber implements Output { public long seenobjects = 0; public long seenclose = 0; public Object lastseenobject = null; public Crystal lastseencrystal = null; public Beam lastseenbeam = null; public Wedge lastseenwedge = null; @Override public void publishCrystal(Crystal c) { seenobjects = seenobjects + 1; lastseenobject = c; lastseencrystal = c; } @Override public void publishBeam(Beam b) { seenobjects = seenobjects + 1; lastseenobject = b; lastseenbeam = b; } @Override public void publishWedge(Wedge w) { seenobjects = seenobjects + 1; lastseenobject = w; lastseenwedge = w; } @Override public void close() { seenclose = seenclose + 1; } } }
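The tests above rely on Experiment broadcasting to every registered Output observer. Writing an observer of one's own is straightforward; the sketch below is a hypothetical example, assuming Output declares exactly the methods overridden by OutputTestSubscriber above.

import se.raddo.raddose3D.Beam;
import se.raddo.raddose3D.Crystal;
import se.raddo.raddose3D.Output;
import se.raddo.raddose3D.Wedge;

/** Hypothetical observer that simply logs whatever an Experiment publishes to it. */
class LoggingOutput implements Output {
  @Override
  public void publishCrystal(Crystal c) {
    System.out.println("crystal published: " + c);
  }

  @Override
  public void publishBeam(Beam b) {
    System.out.println("beam published: " + b);
  }

  @Override
  public void publishWedge(Wedge w) {
    System.out.println("wedge published: " + w);
  }

  @Override
  public void close() {
    System.out.println("output closed");
  }
}

Registering it mirrors the tests: after e.addObserver(new LoggingOutput()), calls to setCrystal, setBeam, and exposeWedge are forwarded as the corresponding publish* calls, and close() is fanned out to every registered observer.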
package org.entitymatcher; import static org.entitymatcher.Builder.Mode.GROUPBY; import static org.entitymatcher.Builder.Mode.ORDERBY; import static org.entitymatcher.Builder.Mode.SELECT; import static org.entitymatcher.Builder.Mode.STATEMENTS; import static org.entitymatcher.Builder.Order.ASC; import static org.entitymatcher.EntityMatcher.camelDown; import static org.entitymatcher.EntityMatcher.camelUp; import static org.entitymatcher.EntityMatcher.isGetter; import static org.entitymatcher.EntityMatcher.toAlias; import java.lang.reflect.Field; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.lang.reflect.Modifier; import java.util.Arrays; import java.util.Iterator; import java.util.LinkedHashSet; import java.util.List; import java.util.Set; import java.util.regex.Matcher; import javax.persistence.Column; import javax.persistence.EntityManager; import javax.persistence.Query; import javax.persistence.Table; import javax.persistence.Transient; import org.entitymatcher.Statement.Part; import com.google.common.base.Function; import com.google.common.collect.ListMultimap; import com.google.common.collect.Lists; import com.google.common.collect.MultimapBuilder; public class Builder<T> extends InvokationCapturer { enum Mode { SELECT, STATEMENTS, HAVING, GROUPBY, ORDERBY }; public enum Order { ASC, DESC }; private final Class<T> defaultRetType; private boolean nativeQuery = false; private final ParameterBinding params = new JpqlBinding(); private final Set<String> tableNames = new LinkedHashSet<>(); private final ListMultimap<Mode, CapturedStatement> map = MultimapBuilder.linkedHashKeys().arrayListValues().build(); private Order order = null; Builder(T main, Object... others) { defaultRetType = observe(main); tableNames.add(defaultRetType.getSimpleName()); observe(others).forEach(c -> tableNames.add(c.getSimpleName())); } /** * Overrides the specified retType T. * <p> * Use : build(instance).select(instance.getBar(), instance.getFoo()).match(...).build(clazz) * <p> * Where bar and foo are the desired constants and clazz is a java bean with bar and foo * properties. */ public Builder<T> select(Object... os) { return processLhsStatements(SELECT, os); } public Builder<T> orderBy(Object... os) { return orderBy(ASC, os); } public Builder<T> orderBy(Order order, Object... os) { this.order = order; return processLhsStatements(ORDERBY, os); } public Builder<T> groupBy(Object... os) { return processLhsStatements(GROUPBY, os); } public Builder<T> nativeQuery(boolean b) { nativeQuery = b; return this; } private List<CapturedStatement> mode(Mode mode) { return map.get(mode); } Builder<T> processLhsStatements(Mode mode, Object... os) { final List<CapturedStatement> capturedStatements = mode(mode); capturedStatements.clear(); Arrays.asList(os).forEach(o -> capturedStatements.add(captureLhsStatement(o))); return this; } CapturedStatement captureLhsStatement(Object o) { if (o instanceof LhsRhsStatement) return new CapturedStatement(extractTableColumn(getLastCapture()), null, (LhsRhsStatement<?>) o); else return new CapturedStatement(extractTableColumn(getLastCapture()), null, null); } public <E> LhsRhsStatementBuilder match(E getter, LhsRhsStatement<? 
extends E> statement) { return match(statement); } public <E> LhsRhsStatementBuilder match(LhsRhsStatement<E> statement) { final TableColumn lhs = extractTableColumn(getLastCapture()); final TableColumn rhs = extractTableColumn(getLastCapture()); return new LhsRhsStatementBuilder(new CapturedStatement(lhs, rhs, statement)); } private final TableColumn nullValue = new TableColumn(null, null); private TableColumn extractTableColumn(Capture capture) { if (capture == null) return nullValue; else return new TableColumn(getTable(capture), getColumn(capture)); } private String getTableName(TableColumn tc) { return tc != null && tc.table != null ? getTableName(tc.table) : null; } private Class<?> getTable(Capture c) { return c == null ? null : c.method.getDeclaringClass(); } private String getTableName(Class<?> clazz) { if (nativeQuery) { final Table table = clazz.getAnnotation(Table.class); final String tableName = table == null || table.name().isEmpty() ? clazz.getSimpleName() : table.name(); return tableName; } else return clazz.getSimpleName(); } private String getColumnName(TableColumn tc) { return tc != null && tc.column != null ? getColumnName(tc.column) : null; } private Class<?> getColumnType(TableColumn tc) { return tc != null && tc.column != null ? tc.column.getType() : null; } private Field getColumn(Capture c) { return c == null ? null : getColumn(c.method); } private Field getColumn(Method m) { final Matcher matcher = isGetter.matcher(m.getName()); if (matcher.matches()) { final String fieldName = camelDown(matcher.group(2)); return getField(m.getDeclaringClass(), fieldName); } throw new IllegalArgumentException("Not a getter '" + m.getName() + "'"); } private Field getField(Class<?> type, String name) { try { return type.getDeclaredField(name); } catch (NoSuchFieldException | SecurityException e) { throw new IllegalArgumentException(type.getName() + "doesn't follow the java beans convention for field: " + name); } } private String getColumnName(Field f) { if (nativeQuery) { final Column c = f.getAnnotation(Column.class); return c == null || c.name().isEmpty() ? 
f.getName() : c.name(); } else return f.getName(); } @Override public String toString() { return composeStringQuery(); } public PreparedQuery<T> build() { return build(defaultRetType); } public <E> PreparedQuery<E> build(Class<E> clazz) { if (mode(SELECT).isEmpty()) { if (nativeQuery) selectFields(clazz); else return createPackedSelectQuery(clazz); } return createUnpackedSelectQuery(clazz); } private void selectFields(Class<?> clazz) { final List<CapturedStatement> select = mode(SELECT); for (Field f : clazz.getDeclaredFields()) { if (isTransient(f)) continue; select.add(new CapturedStatement(new TableColumn(clazz, f), null, null)); } } // JPA transient considered fields (final, static, transient or annotated as Transient) private boolean isTransient(Field f) { final int mod = f.getModifiers(); return f.isAnnotationPresent(Transient.class) || Modifier.isFinal(mod) || Modifier.isStatic(mod) || Modifier.isTransient(mod); } private <E> PreparedQueryImpl<E> createPackedSelectQuery(Class<E> clazz) { return new PreparedQueryImpl<E>() { @Override @SuppressWarnings("unchecked") public E getSingleMatching(EntityManager em) { return (E) createQuery(em, nativeQuery).getSingleResult(); } @Override @SuppressWarnings("unchecked") public List<E> getMatching(EntityManager em) { return createQuery(em, nativeQuery).getResultList(); } }; } private <E> PreparedQueryImpl<E> createUnpackedSelectQuery(Class<E> clazz) { return new PreparedQueryImpl<E>() { @SuppressWarnings("unchecked") private Function<Object[], E> copyProperties = os -> { try { // If the value is null or its type is assignable to clazz, just cast it. if (os.length == 1) { if (os[0] == null) return null; else if (clazz.isAssignableFrom(os[0].getClass())) return (E) os[0]; // safe } // If multiple values or single not assignable, assign the received values in // the selected order to their corresponding properties. 
final E e = clazz.newInstance(); for (int i = 0; i < os.length; i++) { final CapturedStatement c = mode(SELECT).get(i); clazz.getMethod("set".concat(camelUp(getColumnName(c.lhs))), getColumnType(c.lhs)).invoke(e, os[i]); } return e; } catch (InstantiationException | IllegalAccessException | NoSuchMethodException | SecurityException | IllegalArgumentException | InvocationTargetException e) { throw new IllegalArgumentException( "The class provided does not contain the requested named fields, is not a java bean or hasn't a default public ctor"); } }; @Override public E getSingleMatching(EntityManager em) { final Object singleResult = createQuery(em, nativeQuery).getSingleResult(); if (singleResult != null) { if (singleResult.getClass().isArray()) return copyProperties.apply((Object[]) singleResult); else return copyProperties.apply(new Object[] { singleResult }); } return null; } @SuppressWarnings("unchecked") @Override public List<E> getMatching(EntityManager em) { return Lists.transform(createQuery(em, nativeQuery).getResultList(), copyProperties); } }; } String composeStringQuery() { final Set<String> unaliasedTables = new LinkedHashSet<>(Builder.this.tableNames); final StringBuilder selectClause = new StringBuilder().append("SELECT "); final StringBuilder fromClause = new StringBuilder().append(" FROM "); processSelect(selectClause, fromClause, unaliasedTables); final StringBuilder whereClause = new StringBuilder(); processStatements(whereClause, fromClause, unaliasedTables); final StringBuilder groupBy = new StringBuilder(); processGroupBy(groupBy); final StringBuilder orderBy = new StringBuilder(); processOrderBy(orderBy); return new StringBuilder(selectClause).append(removeLastComma(fromClause)).append(whereClause).append(groupBy) .append(orderBy) .toString(); } private void processSelect(StringBuilder selectClause, StringBuilder fromClause, Set<String> unaliasedTables) { if (mode(SELECT).isEmpty()) { if (nativeQuery) appendUnpackedSelect(selectClause, fromClause, unaliasedTables); else appendPackedSelect(selectClause, fromClause, unaliasedTables); } else appendUnpackedSelect(selectClause, fromClause, unaliasedTables); } private void appendPackedSelect(StringBuilder selectClause, StringBuilder fromClause, Set<String> unaliasedTables) { final String tableName = getTableName(defaultRetType); unaliasedTables.remove(tableName); selectClause.append(toAlias(tableName)); fromClause.append(tableName).append(" ").append(toAlias(tableName)).append(", "); } private String appendUnpackedSelect(StringBuilder selectClause, StringBuilder fromClause, Set<String> unaliasedTables) { for (Iterator<CapturedStatement> it = mode(SELECT).iterator(); it.hasNext();) { final CapturedStatement next = it.next(); final String tableName = getTableName(next.lhs); final String tableAlias = toAlias(tableName); final String columnName = getColumnName(next.lhs); if (unaliasedTables.contains(tableName)) { fromClause.append(tableName).append(" ").append(tableAlias).append(", "); unaliasedTables.remove(tableName); } if (next.statement != null) selectClause.append(Statement.toString(next.statement.toJpql(tableAlias, columnName, null, null, params))); else selectClause.append(tableAlias).append(".").append(columnName); selectClause.append(it.hasNext() ? 
", " : ""); } return selectClause.toString(); } private void processStatements(StringBuilder whereClause, StringBuilder fromClause, Set<String> unaliasedTables) { final List<CapturedStatement> statements = mode(STATEMENTS); if (!statements.isEmpty()) { whereClause.append(" WHERE "); for (CapturedStatement captured : statements) { final LhsRhsStatement<?> lhsRhs = captured.statement; if (lhsRhs.isJoinRelationship()) { final String lhsTableName = getTableName(captured.lhs); final List<Part> jpql = lhsRhs.toJpql(toAlias(lhsTableName), getColumnName(captured.lhs), toAlias(getTableName(captured.rhs)), getColumnName(captured.rhs), params); fromClause.append(Statement.toString(jpql)).append(", "); // lhsTableName has been assigned an alias in the FROM clause unaliasedTables.remove(lhsTableName); } else { // Add aliases to the FROM clause if needed final String lhsTableName = getTableName(captured.lhs); final String rhsTableName = getTableName(captured.rhs); if (lhsTableName != null && unaliasedTables.contains(lhsTableName)) { fromClause.append(lhsTableName).append(" ").append(toAlias(lhsTableName)).append(", "); unaliasedTables.remove(lhsTableName); } if (rhsTableName != null && unaliasedTables.contains(rhsTableName)) { fromClause.append(rhsTableName).append(" ").append(toAlias(rhsTableName)).append(", "); unaliasedTables.remove(rhsTableName); } // Add WHERE conditions final List<Part> jpql = lhsRhs.toJpql(toAlias(lhsTableName), getColumnName(captured.lhs), toAlias(rhsTableName), getColumnName(captured.rhs), params); whereClause.append(Statement.toString(jpql)); } } } } private void processOrderBy(StringBuilder orderBy) { final List<CapturedStatement> captures = mode(ORDERBY); if (!captures.isEmpty()) { orderBy.append(" ORDER BY "); appendLhsStatements(orderBy, captures.iterator()); orderBy.append(" ").append(order.name()); } } private void processGroupBy(StringBuilder groupBy) { final List<CapturedStatement> captures = mode(GROUPBY); if (!captures.isEmpty()) { groupBy.append(" GROUP BY "); appendLhsStatements(groupBy, captures.iterator()); } } private void appendLhsStatements(StringBuilder sb, Iterator<CapturedStatement> it) { for (; it.hasNext();) { final CapturedStatement next = it.next(); sb.append(toAlias(getTableName(next.lhs))).append(".").append(getColumnName(next.lhs)); if (it.hasNext()) sb.append(", "); } } // It is actually easier to remove a known last comma, that to handle all unknowns while // iterating (transient, no statements, joins, ...) private StringBuilder removeLastComma(final StringBuilder sb) { return sb.length() == 0 ? sb : sb.replace(sb.length() - 2, sb.length(), ""); } public static interface PreparedQuery<T> { T getSingleMatching(EntityManager em); List<T> getMatching(EntityManager em); } static class TableColumn { final Class<?> table; final Field column; TableColumn(Class<?> table, Field column) { this.table = table; this.column = column; } } static class CapturedStatement { final TableColumn lhs; final TableColumn rhs; final LhsRhsStatement<?> statement; public CapturedStatement(TableColumn lhs, TableColumn rhs, LhsRhsStatement<?> statement) { if (lhs == null && rhs == null && statement == null) throw new IllegalArgumentException("Nothing captured."); this.lhs = lhs; this.rhs = rhs; this.statement = statement; } } abstract class PreparedQueryImpl<E> implements PreparedQuery<E> { protected Query createQuery(EntityManager em, boolean nativeQuery) { final String queryString = composeStringQuery(); final Query query = nativeQuery ? 
em.createNativeQuery(queryString) : em.createQuery(queryString); params.solveQuery(queryString, query); return query; } @Override public String toString() { return composeStringQuery(); } } class LhsRhsStatementBuilder { LhsRhsStatementBuilder(CapturedStatement statement) { map.put(Mode.STATEMENTS, statement); } public LhsRhsStatementBuilder nativeQuery(boolean b) { nativeQuery = b; return this; } public <E> LhsRhsStatementBuilder and(E getter, LhsRhsStatement<E> statement) { return and(statement); } public <E> LhsRhsStatementBuilder and(LhsRhsStatement<E> statement) { final TableColumn lhs = extractTableColumn(getLastCapture()); final TableColumn rhs = extractTableColumn(getLastCapture()); if (lhs == null && rhs == null) throw new IllegalArgumentException( "Nothing captured. Likely an invalid statement (example: [valid -> 'and(instance.getSmth(), lt(-5).and(gt(5))']; [invalid -> 'and(instance.getSmth(), lt(-5)).and(gt(5)'])"); final List<CapturedStatement> statements = mode(STATEMENTS); statements.add(new CapturedStatement(null, null, LhsStatement.and)); statements.add(new CapturedStatement(lhs, rhs, statement)); return this; } public <E> LhsRhsStatementBuilder or(E getter, LhsRhsStatement<E> statement) { return or(statement); } public <E> LhsRhsStatementBuilder or(LhsRhsStatement<E> statement) { final TableColumn lhs = extractTableColumn(getLastCapture()); final TableColumn rhs = extractTableColumn(getLastCapture()); if (lhs == null && rhs == null) throw new IllegalArgumentException( "Nothing captured. Likely an invalid statement (example: [valid -> 'or(instance.getSmth(), like(\"foo\").or(gt(\"bar\"))']; [invalid -> 'or(instance.getSmth(), like(\"foo\")).or(like(\"bar\")'])"); final List<CapturedStatement> statements = mode(STATEMENTS); statements.add(new CapturedStatement(null, null, LhsStatement.or)); statements.add(new CapturedStatement(lhs, rhs, statement)); return this; } public PreparedQuery<T> build() { return Builder.this.build(); } public <E> PreparedQuery<E> build(Class<E> clazz) { return Builder.this.build(clazz); } } }
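When no explicit select(...) is given and the query is native, build(...) falls back to selectFields(...), which selects every field of the return type that is not transient in the JPA sense. The standalone sketch below only re-states that predicate against a made-up entity; SampleEntity and the demo class are hypothetical and merely mirror Builder.isTransient above.

import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import javax.persistence.Transient;

// Hypothetical demo class (not part of the library).
class TransientRuleSketch {
    static class SampleEntity {                     // made-up entity, for illustration only
        long id;
        String name;
        transient String cachedLabel;               // skipped: transient
        static final String TABLE = "SAMPLE";       // skipped: static and final
        @Transient String derived;                  // skipped: annotated @Transient
    }

    // Same predicate as Builder.isTransient(Field).
    static boolean isTransient(Field f) {
        final int mod = f.getModifiers();
        return f.isAnnotationPresent(Transient.class)
                || Modifier.isFinal(mod) || Modifier.isStatic(mod) || Modifier.isTransient(mod);
    }

    public static void main(String[] args) {
        for (Field f : SampleEntity.class.getDeclaredFields()) {
            // Only id and name would be selected by selectFields(...).
            System.out.println(f.getName() + " selected=" + !isTransient(f));
        }
    }
}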
package org.iiitb.os.os_proj; import java.util.Date; public class File { private long id; private String name; private int filetypeId; private Date timestamp; private Date date_created; private Date date_updated; private Date user_created; private Date user_updated; private String path; private long file_size; private String data; public String getData() { return data; } public void setData(String data) { this.data = data; } public long getId() { return id; } public void setId(long id) { this.id = id; } public String getName() { return name; } public void setName(String name) { this.name = name; } public int getFiletypeId() { return filetypeId; } public void setFiletypeId(int filetypeId) { this.filetypeId = filetypeId; } public Date getTimestamp() { return timestamp; } public void setTimestamp(Date timestamp) { this.timestamp = timestamp; } public Date getDate_created() { return date_created; } public void setDate_created(Date date_created) { this.date_created = date_created; } public Date getDate_updated() { return date_updated; } public void setDate_updated(Date date_updated) { this.date_updated = date_updated; } public Date getUser_created() { return user_created; } public void setUser_created(Date user_created) { this.user_created = user_created; } public Date getUser_updated() { return user_updated; } public void setUser_updated(Date user_updated) { this.user_updated = user_updated; } public String getPath() { return path; } public void setPath(String path) { this.path = path; } public long getFile_size() { return file_size; } public void setFile_size(long file_size) { this.file_size = file_size; } }
package org.jenetics.util; import static org.jenetics.util.Validator.nonNull; import java.io.Closeable; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.io.OutputStream; import java.io.Serializable; import javolution.xml.XMLObjectReader; import javolution.xml.XMLObjectWriter; import javolution.xml.XMLSerializable; import javolution.xml.stream.XMLStreamException; public final class IOUtils { private IOUtils() { } /** * By default all {@code read/writeXXX(Input/OutputStream)} methods close * the given input/output stream. To prevent these methods from closing the * streams, you can wrap them in a non-closable stream. * * [code] * final OutputStream out = ... * final XMLSerializable object = ... * try { * writeXML(nonClose(out), object); * // output stream is not closed and can still be used. * } finally { * closeQuietly(out); * } * [/code] * * @param out the output stream to wrap. * @return the wrapped output stream. Calls to {@link OutputStream#close()} * will flush the stream and leave the stream open. * @throws NullPointerException if the given stream is {@code null}. */ public static OutputStream nonClose(final OutputStream out) { return new NonClosableOutputStream(nonNull(out, "Output stream")); } /** * By default all {@code read/writeXXX(Input/OutputStream)} methods close * the given input/output stream. To prevent these methods from closing the * streams, you can wrap them in a non-closable stream. * * [code] * final InputStream in = ... * final XMLSerializable object = ... * try { * Object obj = readXML(nonClose(in)); * // input stream is not closed and can still be used. * } finally { * closeQuietly(in); * } * [/code] * * @param in the input stream to wrap. * @return the wrapped input stream. Calls to {@link InputStream#close()} * will leave the stream open. * @throws NullPointerException if the given stream is {@code null}. */ public static InputStream nonClose(final InputStream in) { return new NonClosableInputStream(nonNull(in, "Input stream")); } /** * Closes the given {@code closeable}. {@code null} values are allowed. * IOExceptions are swallowed. * * @param closeable the closeable to close. */ public static void closeQuietly(final Closeable closeable) { try { if (closeable != null) { closeable.close(); } } catch (IOException ignore) { } } /** * Closes the given {@code writer}. {@code null} values are allowed. * XMLStreamExceptions are swallowed. * * @param writer the writer to close. */ public static void closeQuietly(final XMLObjectWriter writer) { try { if (writer != null) { writer.close(); } } catch (XMLStreamException ignore) { } } /** * Closes the given {@code reader}. {@code null} values are allowed. * XMLStreamExceptions are swallowed. * * @param reader the reader to close. */ public static void closeQuietly(final XMLObjectReader reader) { try { if (reader != null) { reader.close(); } } catch (XMLStreamException ignore) { } } /** * Write the XML serializable object to the given output stream. The output * stream is closed by this method. * * @see #nonClose(OutputStream) * * @param out the output stream. * @param object the object to serialize. * @throws NullPointerException if one of the arguments is {@code null}. * @throws XMLStreamException if the object could not be serialized.
*/ public static <T extends XMLSerializable> void writeXML( final OutputStream out, final T object ) throws XMLStreamException { Validator.nonNull(out, "Output stream"); Validator.nonNull(object, "Object"); XMLObjectWriter writer = null; try { writer = XMLObjectWriter.newInstance(out); writer.setIndentation("\t"); writer.write(object); } finally { closeQuietly(writer); } } /** * Write the XML serializable object to the given path. * * @param path the output path. * @param object the object to serialize. * @throws NullPointerException if one of the arguments is {@code null}. * @throws XMLStreamException if the object could not be serialized. */ public static <T extends XMLSerializable> void writeXML( final File path, final T object ) throws XMLStreamException, FileNotFoundException { writeXML(new FileOutputStream(path), object); } /** * Write the XML serializable object to the given path. * * @param path the output path. * @param object the object to serialize. * @throws NullPointerException if one of the arguments is {@code null}. * @throws XMLStreamException if the object could not be serialized. */ public static <T extends XMLSerializable> void writeXML( final String path, final T object ) throws XMLStreamException, FileNotFoundException { writeXML(new File(path), object); } /** * Reads an object (which was serialized by the * {@link #writeXML(OutputStream, XMLSerializable)} method) from the given * input stream. The input stream is closed by this method. * * @param in the input stream to read from. * @return the de-serialized object. * @throws NullPointerException if the input stream {@code in} is {@code null}. * @throws XMLStreamException if the object could not be read. */ public static <T> T readXML(final Class<T> type, final InputStream in) throws XMLStreamException { Validator.nonNull(type, "Object type"); Validator.nonNull(in, "Input stream"); final XMLObjectReader reader = XMLObjectReader.newInstance(in); try { return type.cast(reader.read()); } finally { closeQuietly(reader); } } /** * Reads an object (which was serialized by the * {@link #writeXML(File, XMLSerializable)} method) from the given path. * * @param path the path to read from. * @return the de-serialized object. * @throws NullPointerException if the input stream {@code in} is {@code null}. * @throws XMLStreamException if the object could not be read. */ public static <T> T readXML(final Class<T> type, final File path) throws FileNotFoundException, XMLStreamException { return readXML(type, new FileInputStream(path)); } /** * Reads an object (which was serialized by the * {@link #writeXML(File, XMLSerializable)} method) from the given path. * * @param path the path to read from. * @return the de-serialized object. * @throws NullPointerException if the input stream {@code in} is {@code null}. * @throws XMLStreamException if the object could not be read. */ public static <T> T readXML(final Class<T> type, final String path) throws FileNotFoundException, XMLStreamException { return readXML(type, new File(path)); } /** * Write the serializable object to the given output stream. The output * stream is closed by this method. * @param out the output stream. * @param object the object to serialize. * * @throws NullPointerException if one of the arguments is {@code null}. * @throws IOException if the object could not be serialized. 
*/ public static void writeObject( final OutputStream out, final Serializable object ) throws IOException { Validator.nonNull(out, "Output"); Validator.nonNull(object, "Object"); final ObjectOutputStream oout = new ObjectOutputStream(out); try { oout.writeObject(object); } finally { closeQuietly(oout); } } /** * Write the serializable object to the given output stream. * @param path the output paths. * @param object the object to serialize. * * @throws NullPointerException if one of the arguments is {@code null}. * @throws IOException if the object could not be serialized. */ public static void writeObject( final File path, final Serializable object ) throws IOException { Validator.nonNull(path, "Path"); writeObject(new FileOutputStream(path), object); } /** * Write the serializable object to the given output stream. * @param path the output paths. * @param object the object to serialize. * * @throws NullPointerException if one of the arguments is {@code null}. * @throws IOException if the object could not be serialized. */ public static void writeObject( final String path, final Serializable object ) throws IOException { writeObject(new File(path), object); } /** * Reads an object (which was serialized by the * {@link #writeObject(OutputStream, Serializable)} method) from the given * input stream. The input stream is not closed by this method. * * @param in the input stream to read from. * @return the de-serialized object. * @throws NullPointerException if the input stream {@code in} is {@code null}. * @throws IOException if the object could not be read. */ public static <T extends Serializable> T readObject( final Class<T> type, final InputStream in ) throws IOException { Validator.nonNull(type, "Object type"); Validator.nonNull(in, "Input stream"); T object = null; final ObjectInputStream oin = new ObjectInputStream(in); try { object = type.cast(oin.readObject()); } catch (ClassNotFoundException e) { throw new IOException(e); } finally { closeQuietly(oin); } return object; } /** * Reads an object (which was serialized by the * {@link #writeObject(File, Serializable)} method) from the given * input path. * * @param path the input path to read from. * @return the de-serialized object. * @throws NullPointerException if the input stream {@code in} is {@code null}. * @throws IOException if the object could not be read. */ public static <T extends Serializable> T readObject( final Class<T> type, final File path ) throws IOException { Validator.nonNull(path, "Path"); return readObject(type, new FileInputStream(path)); } /** * Reads an object (which was serialized by the * {@link #writeObject(String, Serializable)} method) from the given * input path. * * @param path the input path to read from. * @return the de-serialized object. * @throws NullPointerException if the input stream {@code in} is {@code null}. * @throws IOException if the object could not be read. */ public static <T extends Serializable> T readObject( final Class<T> type, final String path ) throws IOException { Validator.nonNull(path, "Path"); return readObject(type, new FileInputStream(path)); } private static final class NonClosableOutputStream extends OutputStream { private final OutputStream _adoptee; public NonClosableOutputStream(final OutputStream adoptee) { _adoptee = adoptee; } @Override public void close() throws IOException { //Ignore close call. 
_adoptee.flush(); } @Override public boolean equals(Object obj) { return _adoptee.equals(obj); } @Override public void flush() throws IOException { _adoptee.flush(); } @Override public int hashCode() { return _adoptee.hashCode(); } @Override public String toString() { return _adoptee.toString(); } @Override public void write(byte[] b, int off, int len) throws IOException { _adoptee.write(b, off, len); } @Override public void write(byte[] b) throws IOException { _adoptee.write(b); } @Override public void write(int b) throws IOException { _adoptee.write(b); } } private static final class NonClosableInputStream extends InputStream { private final InputStream _adoptee; public NonClosableInputStream(final InputStream adoptee) { _adoptee = adoptee; } @Override public int available() throws IOException { return _adoptee.available(); } @Override public void close() throws IOException { } @Override public void mark(int readlimit) { _adoptee.mark(readlimit); } @Override public boolean markSupported() { return _adoptee.markSupported(); } @Override public int read() throws IOException { return _adoptee.read(); } @Override public int read(byte[] b, int off, int len) throws IOException { return _adoptee.read(b, off, len); } @Override public int read(byte[] b) throws IOException { return _adoptee.read(b); } @Override public void reset() throws IOException { _adoptee.reset(); } @Override public long skip(long n) throws IOException { return _adoptee.skip(n); } } }
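A quick in-memory round trip shows how nonClose(...) keeps a stream usable after writeObject(...), which would otherwise close it. The demo class below is hypothetical; every call it makes is to an IOUtils method defined above or to the JDK.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import org.jenetics.util.IOUtils;

// Hypothetical demo class (not part of the library).
class IOUtilsRoundTrip {
    public static void main(String[] args) throws IOException {
        final ByteArrayOutputStream out = new ByteArrayOutputStream();
        try {
            // writeObject normally closes the stream; nonClose(...) keeps it usable afterwards.
            IOUtils.writeObject(IOUtils.nonClose(out), "hello");    // String is Serializable
            System.out.println("bytes written: " + out.size());
        } finally {
            IOUtils.closeQuietly(out);
        }

        final ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());
        final String copy = IOUtils.readObject(String.class, in);
        System.out.println(copy);                                    // prints "hello"
    }
}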
package org.ndexbio.sync; import java.io.IOException; import java.net.URI; import java.net.URISyntaxException; import java.sql.Timestamp; import java.util.ArrayList; import java.util.Calendar; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.UUID; import java.util.logging.Logger; import org.ndexbio.model.exceptions.NdexException; import org.ndexbio.model.object.NdexProvenanceEventType; import org.ndexbio.model.object.Permissions; import org.ndexbio.model.object.ProvenanceEntity; import org.ndexbio.model.object.ProvenanceEvent; import org.ndexbio.model.object.Request; import org.ndexbio.model.object.network.Network; import org.ndexbio.model.object.network.NetworkSummary; import org.ndexbio.model.tools.PropertyHelpers; import org.ndexbio.model.tools.ProvenanceHelpers; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonSubTypes; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonSubTypes.Type; import com.fasterxml.jackson.core.JsonProcessingException; @JsonIgnoreProperties(ignoreUnknown = true) @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "planType") @JsonSubTypes(value = { @Type(value = QueryCopyPlan.class, name = "QueryCopyPlan"), @Type(value = IdCopyPlan.class, name = "IdCopyPlan") }) public abstract class CopyPlan implements NdexProvenanceEventType { protected final static Logger LOGGER = Logger.getLogger(CopyPlan.class.getName()); NdexServer source; NdexServer target; String targetGroupName; String planFileName; List<NetworkSummary> sourceNetworks; List<NetworkSummary> targetCandidates; Map<String, ProvenanceEntity> provenanceMap; boolean updateTargetNetwork = false; boolean updateReadOnlyNetwork = false; public void process() throws JsonProcessingException, IOException, NdexException { source.initialize(); target.initialize(); provenanceMap = new HashMap<>(); findSourceNetworks(); getAllSourceProvenance(); findTargetCandidates(); getAllTargetProvenance(); if (updateTargetNetwork) { // update network(s) on the target server for (NetworkSummary network: sourceNetworks) { updateTargetNetwork(network); } } else { // copy source network(s) from source server to target for (NetworkSummary network: sourceNetworks) { copySourceNetwork(network); } } } // Find networks in target NDEx in the target account. // the account is always the target user account. 
// the number of networks queried is limited to 100 private void findTargetCandidates() throws JsonProcessingException, IOException { targetCandidates = target.getNdex().findNetworks("", true, target.getUsername(), Permissions.ADMIN, false, 0, 100); LOGGER.info("Found " + targetCandidates.size() + " networks in target NDEx under " + target.getUsername()); } public abstract void findSourceNetworks() throws NdexException; // Get the provenance history for each candidate network in the target account private void getAllTargetProvenance() throws JsonProcessingException, IOException, NdexException { LOGGER.info("Getting provenance history for " + targetCandidates.size() + " candidate networks in target account"); getAllProvenance(target, targetCandidates); } // Get the provenance history for each source network private void getAllSourceProvenance() throws JsonProcessingException, IOException, NdexException { LOGGER.info("Getting Source Network Provenance for " + sourceNetworks.size() + " networks"); getAllProvenance(source, sourceNetworks); } // Get the provenance history for a list of networks // Store by UUID in the provenance map private void getAllProvenance(NdexServer server, List<NetworkSummary> networks) { //throws JsonProcessingException, IOException, NdexException{ ArrayList<NetworkSummary> networksNotToCopy = new ArrayList<NetworkSummary>(); for (NetworkSummary network : networks) { try { ProvenanceEntity provenance = server.getNdex().getNetworkProvenance(network.getExternalId().toString()); if (null != provenance) { LOGGER.info("Storing Provenance for network " + network.getExternalId()); provenanceMap.put(network.getExternalId().toString(), provenance); } } catch (IOException | NdexException e) { // unable to read this networks' provenance. It means we won't be able to copy/update it. // Let's save it in the networksNotToCopy list and remove it from the copy plan later. networksNotToCopy.add(network); System.out.println(e.getMessage()); continue; } } for (NetworkSummary network : networksNotToCopy) { // remove networks whose provenance we couldn't read, since we can't copy/update these networks networks.remove(network); } } // Process one source network private void updateTargetNetwork(NetworkSummary sourceNetwork) throws JsonProcessingException, IOException, NdexException { LOGGER.info("Trying to update target network created from source " + sourceNetwork.getName() + " ; source last modified " + sourceNetwork.getModificationTime()); // for targetCandidate, get provenance history and determine whether the target candidate // is a first generation copy of the source network. 
boolean copySourceNetwork = true; String sourceNetworkUUID = sourceNetwork.getExternalId().toString(); String parentNetworkUUID = null; String parentEntityUri = null; // Get the provenance history of the source from the provenance map ProvenanceEntity sourceRootProvenanceEntity = provenanceMap.get(sourceNetwork.getExternalId().toString()); // Evaluate all targetCandidates to see if there is an existing copy of the source // and whether that copy needs update for (NetworkSummary targetCandidate : targetCandidates) { // get provenance of the target network from the server ProvenanceEntity targetRootProvenanceEntity = provenanceMap.get(targetCandidate.getExternalId().toString()); if (null == targetRootProvenanceEntity){ // no provenance root entity, hence unknown status LOGGER.info("No provenance entity exists for target " + targetCandidate.getExternalId()); continue; // get next target network } LOGGER.info("Processing provenance history for target " + targetCandidate.getExternalId()); ProvenanceEvent targetProvenanceEvent = targetRootProvenanceEntity.getCreationEvent(); if (null == targetProvenanceEvent) { LOGGER.info("No provenance event exists for target " + targetCandidate.getExternalId()); continue; // get next target network } if (SNYC_COPY.equalsIgnoreCase(targetProvenanceEvent.getEventType())) { // COPY was the latest event for the current target; let's get UUID of the parent network if ((targetRootProvenanceEntity.getProperties() != null) && (targetRootProvenanceEntity.getProperties().size() >= 3) ) { parentEntityUri = targetRootProvenanceEntity.getProperties().get(2).getValue(); // get URI of parent network try { //extract UUID from URI URI uri = new URI(parentEntityUri); String[] segments = uri.getPath().split("/"); parentNetworkUUID = segments[segments.length-1]; //parentNetworkUUID = parentEntityUri.replaceFirst(".*/([^/?]+).*", "$1"); // this is another way of extracting UUID of parent network LOGGER.info("UUID of the parent network found in provenance of target network " + targetCandidate.getExternalId() + " is " + parentNetworkUUID); } catch (URISyntaxException e) { LOGGER.info("Unable to get UUID of the parent network from provenance of target network " + targetCandidate.getExternalId() + " Exception thrown: " + e.getMessage()); continue; // get next target network } // if we reached this point, it means we found/extracted from the provenace of target network the UUID of the // network that created this target network by COPY and that COPY was the last event of the target network // (target network was not modified after that). // Let's check UUIDs of source and target networks. 
if (!sourceNetworkUUID.equals(parentNetworkUUID)) { // this target network was NOT created from the current source network, // therefore, we cannot update it // LOGGER.info("sourceNetworkUUID " + sourceNetworkUUID + "doesn't equal parentNetworkUUID " + parentNetworkUUID); continue; // get next target network } if (null == sourceRootProvenanceEntity){ // no provenance root entity, hence unknown status LOGGER.info("No provenance entity exists for source network" + sourceNetwork.getExternalId().toString()); continue; // get next target network } ProvenanceEvent sourceProvenanceEvent = sourceRootProvenanceEntity.getCreationEvent(); if (null == sourceProvenanceEvent) { LOGGER.info("No provenance event exists for source " + sourceNetwork.getExternalId().toString()); continue; // get next target network } LOGGER.info("sourceNetworkUUID " + sourceNetworkUUID + " equals parentNetworkUUID " + parentNetworkUUID); // target network was created from source network and was not modified after that (last target event was COPY). // Let's check if target network is "out-of-date". // calculate latestSourceDate as the later of modification date and the last provenance history event end date for the source network. Timestamp latestSourceDate = (sourceNetwork.getModificationTime().after((Timestamp)sourceProvenanceEvent.getEndedAtTime())) ? sourceNetwork.getModificationTime() : ((Timestamp)sourceProvenanceEvent.getEndedAtTime()); // calculate earliestTargetDate as the earlier of modification date and the last provenance history event end date for the target network. Timestamp earliestTargetDate = (targetCandidate.getModificationTime().before((Timestamp)targetProvenanceEvent.getEndedAtTime())) ? targetCandidate.getModificationTime() : ((Timestamp) targetProvenanceEvent.getEndedAtTime()); // System.out.println("sourceNetwork.getModificationTime()=" + sourceNetwork.getModificationTime() + " " + // " (Timestamp)sEvent.getEndedAtTime()=" + (Timestamp)sourceProvenanceEvent.getEndedAtTime()); // System.out.println("targetCandidate.getModificationTime()=" + targetCandidate.getModificationTime() + " " + // " pEvent.getEndedAtTime()=" + (Timestamp) targetProvenanceEvent.getEndedAtTime()); //System.out.println("latestSourceDate=" + latestSourceDate.toString() + " earliestTargetDate= " + earliestTargetDate.toString() ); if (latestSourceDate.before(earliestTargetDate)) { // target network update/modify time is more recent than that of source network; don't update target LOGGER.info("latestSourceDate = " + latestSourceDate.toString() + "; earliestTargetDate = " + earliestTargetDate.toString() + ". Not updating target."); // since there exists a copy of the source network on the target server that doesn't require updating, // we will not copy this source network to target. copySourceNetwork = false; continue; // get next target network } // let's check if the target network is read-only, and if yes, check the value of updateReadOnlyNetwork // configuration parameter. To check if target is read-only, if (targetCandidate.getReadOnlyCommitId() > 0). if ((targetCandidate.getReadOnlyCommitId() > 0) && (false == updateReadOnlyNetwork)) { // the target is read-only and updateReadOnlyNetwork config parameter is false, don't update target LOGGER.info("Target network " + targetCandidate.getExternalId() + " is read-only and updateReadOnlyNetwork is false. 
Not updating target."); copySourceNetwork = false; continue; // get next target network } // finally, update the target network if (targetCandidate.getReadOnlyCommitId() > 0) { // target network is read-only updateReadOnlyNetwork(sourceNetwork, targetCandidate); copySourceNetwork = false; } else { // target network is not read-only updateNetwork(sourceNetwork, targetCandidate); copySourceNetwork = false; } } else { // if ((pRoot.getProperties() != null) && (pRoot.getProperties().size() >= 3) ) LOGGER.info("Unable to get UUID of the parent network because the pav:retrievedFrom property is missing from provenance of target network " + targetCandidate.getExternalId()); continue; // get next target network } } else { // if (SNYC_COPY.equals(pEvent.getEventType())) { // the latest provenance event of the target network was not COPY. // This means that we will not update the current target network, but it is possible that // we'll have to copy source network over to the target (in case no target network // So, we'll traverse provenance from the latest event back to the earliest one to find the latest (most recent) COPY event. // If we find a COPY event, we'll compare UUID of the source network and network that created target. List<ProvenanceEntity> inputs = targetProvenanceEvent.getInputs(); if ((null == inputs) || (inputs.size() == 0)) { LOGGER.info("No provenance history exists for target " + targetCandidate.getExternalId()); continue; // get next source network } //copyEventFound = false; parentEntityUri = null; while(inputs != null) { if (SNYC_COPY.equalsIgnoreCase(inputs.get(0).getCreationEvent().getEventType())) { //copyEventFound = true; if ((inputs.get(0).getProperties() != null) && (inputs.get(0).getProperties().size() >= 3) ) { parentEntityUri = inputs.get(0).getProperties().get(2).getValue(); // get URI of parent network try { //extract UUID from URI URI uri = new URI(parentEntityUri); String[] segments = uri.getPath().split("/"); parentNetworkUUID = segments[segments.length-1]; //parentNetworkUUID = parentEntityUri.replaceFirst(".*/([^/?]+).*", "$1"); // this is another way of extracting UUID of parent network LOGGER.info("UUID of the parent network found in provenance of target network " + targetCandidate.getExternalId() + " is " + parentNetworkUUID); } catch (URISyntaxException e) { LOGGER.info("Unable to get UUID of the parent network from provenance of target network " + targetCandidate.getExternalId() + " Exception thrown: " + e.getMessage()); break; } if (sourceNetworkUUID.equals(parentNetworkUUID)) { // Don't copy source network over to target. copySourceNetwork = false; break; } } else { LOGGER.info("Unable to get UUID of the parent network because the pav:retrievedFrom property is missing from provenance of target network " + targetCandidate.getExternalId()); break; // break out of the loop } } inputs = inputs.get(0).getCreationEvent().getInputs(); } } } // we finished looping through the list of target networks. 
// If no copy of the source network exists on the target, then copy source network to target if (copySourceNetwork) { LOGGER.info("No target that is a copy of the source found, will therefore copy the network "); copyNetwork(sourceNetwork); copySourceNetwork = false; } } private void updateReadOnlyNetwork(NetworkSummary sourceNetwork, NetworkSummary targetNetwork) throws IOException, NdexException { String networkId = targetNetwork.getExternalId().toString(); try { // set target network to read-write mode target.getNdex().setNetworkFlag(networkId, "readOnly", "false"); } catch (Exception e) { LOGGER.severe("Error attempting to set readOnly flag to false for network " + sourceNetwork.getExternalId()); e.printStackTrace(); } // update target network updateNetwork(sourceNetwork, targetNetwork); try { // set target network back to read-only mode target.getNdex().setNetworkFlag(networkId, "readOnly", "true"); } catch (Exception e) { LOGGER.severe("Error attempting to set readOnly flag to true for network " + sourceNetwork.getExternalId()); e.printStackTrace(); } } private void updateNetwork(NetworkSummary sourceNetwork, NetworkSummary targetNetwork) throws IOException, NdexException { Network entireNetwork = source.getNdex().getNetwork(sourceNetwork.getExternalId().toString()); entireNetwork.setExternalId(targetNetwork.getExternalId()); try { // update target network NetworkSummary copiedNetwork = target.getNdex().updateNetwork(entireNetwork); LOGGER.info("Updated " + sourceNetwork.getExternalId() + " to " + copiedNetwork.getExternalId()); ProvenanceEntity newProvananceHistory = createCopyProvenance(copiedNetwork, sourceNetwork); target.getNdex().setNetworkProvenance(copiedNetwork.getExternalId().toString(), newProvananceHistory); LOGGER.info("Set provenance for copy " + copiedNetwork.getExternalId()); } catch (Exception e) { LOGGER.severe("Error attempting to copy " + sourceNetwork.getExternalId()); e.printStackTrace(); } } // Process one source network private void copySourceNetwork(NetworkSummary sourceNetwork) throws JsonProcessingException, IOException, NdexException { LOGGER.info("Processing source network " + sourceNetwork.getName() + " last modified " + sourceNetwork.getModificationTime()); // Get the provenance history of the source from the provenance map ProvenanceEntity sRoot = provenanceMap.get(sourceNetwork.getExternalId().toString()); // for targetCandidate, get provenance history and determine whether the target candidate // is a first generation copy of the source network. NetworkSummary targetNetwork = null; boolean targetNetworkNeedsUpdate = false; // Evaluate all targetCandidates to see if there is an existing copy of the source // and whether that copy needs update for (NetworkSummary targetCandidate : targetCandidates){ ProvenanceEntity pRoot = provenanceMap.get(targetCandidate.getExternalId().toString()); if (null == pRoot){ // no provenance root entity, hence unknown status LOGGER.info("No provenance history for target " + targetCandidate.getExternalId()); } else { LOGGER.info("Processing provenance history for target " + targetCandidate.getExternalId()); ProvenanceEvent pEvent = pRoot.getCreationEvent(); // is the creation event a copy? 
// TODO: checking for valid copy event: should have just one input if (null != pEvent && SNYC_COPY.equalsIgnoreCase(pEvent.getEventType())){ LOGGER.info("Found target candidate that is derived from a copy event "); List<ProvenanceEntity> inputs = pEvent.getInputs(); if (null != inputs && inputs.size() > 0){ // does the input UUID match source UUID? ProvenanceEntity input = inputs.get(0); if (input.getUri().equalsIgnoreCase(sRoot.getUri())){ // Yes, this is a copy of the source network LOGGER.info("Found direct copy of source network " + sRoot.getUri()); targetNetwork = targetCandidate; // Now check the modification date... if(sourceNetwork.getModificationTime().after(pEvent.getEndedAtTime())){ // The sourceNetwork is later than the end date of the copy event // Therefore we should update the target LOGGER.info("Source copy date is after target copy event, therefore needs update"); targetNetworkNeedsUpdate = true; break; } } } } else { LOGGER.info("No provenance event or not a copy event for " + targetCandidate.getExternalId()); // Most proximal event is not a copy, so this network cannot match the source, // Therefore do nothing } } } // now do a copy or update, if // 1. there is no target that is a copy or // 2. if there is a only a copy needing update. if (null != targetNetwork){ if (targetNetworkNeedsUpdate){ // overwrite target LOGGER.info("We have a target that is a copy needing update, but update is not implemented yet, so just making another copy."); copyNetwork(sourceNetwork); } else { LOGGER.info("We have a target that is an existing copy, but it does not need update, therefore not copying."); } } else { // no target found, copy network LOGGER.info("No target that is a copy of the source found, will therefore copy the network "); copyNetwork(sourceNetwork); } } private void copyNetwork(NetworkSummary sourceNetwork) throws IOException, NdexException{ Network entireNetwork = source.getNdex().getNetwork(sourceNetwork.getExternalId().toString()); try { // TODO create updated provenance history NetworkSummary copiedNetwork = target.getNdex().createNetwork(entireNetwork); LOGGER.info("Copied " + sourceNetwork.getExternalId() + " to " + copiedNetwork.getExternalId()); ProvenanceEntity newProvananceHistory = createCopyProvenance(copiedNetwork, sourceNetwork); target.getNdex().setNetworkProvenance(copiedNetwork.getExternalId().toString(), newProvananceHistory); LOGGER.info("Set provenance for copy " + copiedNetwork.getExternalId()); } catch (Exception e) { LOGGER.severe("Error attempting to copy " + sourceNetwork.getExternalId()); e.printStackTrace(); } } // Attributes to be read from file private ProvenanceEntity createCopyProvenance( NetworkSummary copiedNetwork, NetworkSummary sourceNetwork) { ProvenanceEntity sourceProvenanceEntity = provenanceMap.get(sourceNetwork.getExternalId().toString()); // If the source has no provenance history, we create a minimal // ProvenanceEntity that has the appropriate URI if (null == sourceProvenanceEntity){ sourceProvenanceEntity = new ProvenanceEntity(sourceNetwork, source.getNdex().getBaseRoute()); } // Create the history ProvenanceEntity copyProv = ProvenanceHelpers.createProvenanceHistory( copiedNetwork, copiedNetwork.getURI(), SNYC_COPY, new Timestamp(Calendar.getInstance().getTimeInMillis()), sourceProvenanceEntity ); // Add properties based on if (null != sourceNetwork.getName()){ PropertyHelpers.addProperty("dc:title", sourceNetwork.getName(), copyProv.getProperties()); } if (null != sourceNetwork.getDescription()){ 
PropertyHelpers.addProperty("dc:description", sourceNetwork.getName(), copyProv.getProperties()); } PropertyHelpers.addProperty("pav:retrievedFrom", sourceNetwork.getURI(), copyProv.getProperties()); return copyProv; } public String getTargetGroupName() { return targetGroupName; } public void setTargetAccountName(String targetGroupName) { this.targetGroupName = targetGroupName; } public String getPlanFileName() { return planFileName; } public void setPlanFileName(String planFileName) { this.planFileName = planFileName; } public NdexServer getSource() { return source; } public void setSource(NdexServer source) { this.source = source; } public NdexServer getTarget() { return target; } public void setTarget(NdexServer target) { this.target = target; } public boolean getUpdateTargetNetwork() { return updateTargetNetwork; } public void setUpdateTarget(boolean updateTargetNetwork) { this.updateTargetNetwork = updateTargetNetwork; } public boolean getUpdateReadOnlyNetwork() { return updateReadOnlyNetwork; } public void setUpdateReadOnlyNetwork(boolean updateReadOnlyNetwork) { this.updateReadOnlyNetwork = updateReadOnlyNetwork; } }
package otognan; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Configuration; import org.springframework.security.config.annotation.authentication.builders.AuthenticationManagerBuilder; import org.springframework.security.config.annotation.web.builders.HttpSecurity; import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity; import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter; @Configuration @EnableWebSecurity public class WebSecurityConfig extends WebSecurityConfigurerAdapter { @Override protected void configure(HttpSecurity http) throws Exception { http .authorizeRequests() .antMatchers("/", "/auth/facebook", "/signin/facebook").permitAll() .antMatchers("/hello").permitAll() .anyRequest().authenticated() .and() .formLogin() //.loginPage("/auth/facebook") //.loginPage("/login") //.permitAll() .and() .logout() .permitAll(); } @Autowired public void configureGlobal(AuthenticationManagerBuilder auth) throws Exception { auth .inMemoryAuthentication() .withUser("user").password("password").roles("USER"); } }
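/*
 * A hedged sketch of how the access rules above could be exercised with Spring's MockMvc
 * support. It is not part of the original project; it assumes JUnit 4, Spring Boot 1.4+
 * test auto-configuration on the classpath, an application class annotated with
 * @SpringBootApplication, and that a handler for "/hello" exists. The test class name and
 * the protected URL are placeholders.
 */
package otognan;

import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;

import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.test.web.servlet.MockMvc;

@RunWith(SpringRunner.class)
@SpringBootTest
@AutoConfigureMockMvc
public class WebSecurityConfigSketchTest {

    @Autowired
    private MockMvc mockMvc;

    @Test
    public void helloIsPermittedWithoutLogin() throws Exception {
        // "/hello" is covered by permitAll(), so an anonymous request is served directly.
        mockMvc.perform(get("/hello")).andExpect(status().isOk());
    }

    @Test
    public void otherUrlsRequireAuthentication() throws Exception {
        // anyRequest().authenticated() sends anonymous users to the form login page.
        mockMvc.perform(get("/some-protected-url")).andExpect(status().is3xxRedirection());
    }
}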
package seedu.doist.ui; import java.util.logging.Logger;

import javafx.fxml.FXML;
import javafx.scene.control.SplitPane;
import javafx.scene.control.TextField;
import javafx.scene.input.KeyCode;
import javafx.scene.input.KeyEvent;
import javafx.scene.layout.AnchorPane;
import javafx.scene.layout.Region;
import seedu.doist.commons.core.LogsCenter;
import seedu.doist.commons.events.ui.NewResultAvailableEvent;
import seedu.doist.commons.util.FxViewUtil;
import seedu.doist.logic.Logic;
import seedu.doist.logic.commands.CommandHistory;
import seedu.doist.logic.commands.CommandResult;
import seedu.doist.logic.commands.exceptions.CommandException;

public class CommandBox extends UiPart<Region> {
    private final Logger logger = LogsCenter.getLogger(CommandBox.class);
    private static final String FXML = "CommandBox.fxml";
    public static final String ERROR_STYLE_CLASS = "error";
    private final Logic logic;

    @FXML
    private TextField commandTextField;

    public CommandBox(AnchorPane commandBoxPlaceholder, Logic logic) {
        super(FXML);
        this.logic = logic;
        addToPlaceholder(commandBoxPlaceholder);
    }

    private void addToPlaceholder(AnchorPane placeHolderPane) {
        SplitPane.setResizableWithParent(placeHolderPane, false);
        placeHolderPane.getChildren().add(commandTextField);
        FxViewUtil.applyAnchorBoundaryParameters(getRoot(), 0.0, 0.0, 0.0, 0.0);
        FxViewUtil.applyAnchorBoundaryParameters(commandTextField, 0.0, 0.0, 0.0, 0.0);
    }

    @FXML
    private void handleKeyPressed(KeyEvent event) {
        if (event.getCode() == KeyCode.ENTER) {
            handleEnterKey();
        } else if (event.getCode() == KeyCode.UP) {
            // The up and down arrow keys would otherwise move the cursor to position 0;
            // consume() marks this event as consumed, which stops further propagation.
            event.consume();
            handleUpKey();
        } else if (event.getCode() == KeyCode.DOWN) {
            event.consume();
            handleDownKey();
        }
    }

    // Handles the Down key press
    private void handleDownKey() {
        String userCommandText = CommandHistory.getNextCommand();
        if (userCommandText == null) {
            setCommandInput("");
        } else {
            setCommandInput(userCommandText);
        }
    }

    // Handles the Up key press
    private void handleUpKey() {
        String userCommandText = CommandHistory.getPreviousCommand();
        if (userCommandText == null) {
            setCommandInput("");
        } else {
            setCommandInput(userCommandText);
        }
    }

    // Handles the Enter key press
    private void handleEnterKey() {
        try {
            String userCommandText = commandTextField.getText();
            manageCommandHistory(userCommandText);
            CommandResult commandResult = logic.execute(userCommandText);
            // process result of the command
            setStyleToIndicateCommandSuccess();
            commandTextField.setText("");
            logger.info("Result: " + commandResult.feedbackToUser);
            raise(new NewResultAvailableEvent(commandResult.feedbackToUser));
        } catch (CommandException e) {
            // handle command failure: log the offending input before clearing the field
            setStyleToIndicateCommandFailure();
            logger.info("Invalid command: " + commandTextField.getText());
            commandTextField.setText("");
            raise(new NewResultAvailableEvent(e.getMessage()));
        }
    }

    // Restores the command history pointer and records the new command.
    // Throws an exception if adding to the history fails.
    private void manageCommandHistory(String userCommandText) {
        CommandHistory.restore();
        if (!CommandHistory.addCommandHistory(userCommandText)) {
            throw new ArrayIndexOutOfBoundsException();
        }
    }

    private void setCommandInput(String string) {
        commandTextField.setText(string);
        // move the cursor to the end of the input string
        commandTextField.positionCaret(string.length());
    }

    /**
     * Sets the command box style to indicate a successful command.
*/ private void setStyleToIndicateCommandSuccess() { commandTextField.getStyleClass().remove(ERROR_STYLE_CLASS); } /** * Sets the command box style to indicate a failed command. */ private void setStyleToIndicateCommandFailure() { commandTextField.getStyleClass().add(ERROR_STYLE_CLASS); } }
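/*
 * The CommandBox above walks through previously entered commands via a static CommandHistory
 * API (restore, addCommandHistory, getPreviousCommand, getNextCommand). The real seedu.doist
 * implementation is not shown in this file; the class below is only a hypothetical
 * list-plus-pointer sketch that satisfies the same call pattern.
 */
class CommandHistorySketch {

    private static final java.util.List<String> history = new java.util.ArrayList<String>();
    // history.size() means "at the newest end"; smaller values index the entry last handed out.
    private static int pointer = 0;

    /** Appends a command; the boolean mirrors the "did the add succeed" contract used above. */
    static boolean addCommandHistory(String command) {
        return history.add(command);
    }

    /** Resets navigation to the newest end, as CommandBox does before executing a new command. */
    static void restore() {
        pointer = history.size();
    }

    /** Steps back towards older commands; returns null once the oldest entry has been passed. */
    static String getPreviousCommand() {
        if (pointer == 0) {
            return null;
        }
        pointer--;
        return history.get(pointer);
    }

    /** Steps towards newer commands; returns null at the newest end (CommandBox then clears the field). */
    static String getNextCommand() {
        if (pointer >= history.size() - 1) {
            pointer = history.size();
            return null;
        }
        pointer++;
        return history.get(pointer);
    }
}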
package ui.issuepanel; import static ui.components.KeyboardShortcuts.DEFAULT_SIZE_WINDOW; import static ui.components.KeyboardShortcuts.JUMP_TO_FILTER_BOX; import static ui.components.KeyboardShortcuts.MAXIMIZE_WINDOW; import static ui.components.KeyboardShortcuts.MINIMIZE_WINDOW; import static ui.components.KeyboardShortcuts.SWITCH_BOARD; import ui.components.PanelMenuBar; import backend.interfaces.IModel; import backend.resource.TurboIssue; import backend.resource.TurboUser; import filter.ParseException; import filter.Parser; import filter.expression.FilterExpression; import filter.expression.Qualifier; import javafx.collections.FXCollections; import javafx.collections.ObservableList; import javafx.geometry.Insets; import javafx.scene.Node; import javafx.scene.layout.HBox; import javafx.scene.control.Label; import javafx.scene.text.Text; import javafx.scene.input.KeyEvent; import ui.TestController; import ui.UI; import ui.components.FilterTextField; import util.events.*; import util.events.testevents.UIComponentFocusEvent; import prefs.PanelInfo; import java.util.ArrayList; import java.util.List; import java.util.stream.Collectors; /** * An FilterPanel is a AbstractPanel meant for containing issues and an accompanying filter text field, * which specifies the issues to be contained within as well as their order. * * The FilterPanel does not perform the filtering itself - it merely specifies how filtering is to be done. * * The FilterPanel also does not specify how the list is to be displayed -- subclasses override methods * which determine that. */ public abstract class FilterPanel extends AbstractPanel { private ObservableList<TurboIssue> issuesToDisplay = null; public PanelMenuBar panelMenuBar; protected FilterTextField filterTextField; private UI ui; protected FilterExpression currentFilterExpression = Qualifier.EMPTY; public FilterPanel(UI ui, IModel model, PanelControl parentPanelControl, int panelIndex) { super(model, parentPanelControl, panelIndex); this.ui = ui; panelMenuBar = new PanelMenuBar(this, model, ui); getChildren().addAll(panelMenuBar, createFilterBox()); setUpEventHandler(); focusedProperty().addListener((unused, wasFocused, isFocused) -> { if (isFocused) { getStyleClass().add("panel-focused"); } else { getStyleClass().remove("panel-focused"); } }); setupKeyboardShortcuts(); } private void setupKeyboardShortcuts() { addEventHandler(KeyEvent.KEY_PRESSED, event -> { if (MAXIMIZE_WINDOW.match(event)) { ui.maximizeWindow(); } else if (MINIMIZE_WINDOW.match(event)) { ui.minimizeWindow(); } else if (DEFAULT_SIZE_WINDOW.match(event)) { ui.setDefaultWidth(); } else if (SWITCH_BOARD.match(event)) { ui.getMenuControl().switchBoard(); } else if (JUMP_TO_FILTER_BOX.match(event)) { setFocusToFilterBox(); } }); } private void setFocusToFilterBox() { if (TestController.isTestMode()) { ui.triggerEvent(new UIComponentFocusEvent(UIComponentFocusEvent.EventType.FILTER_BOX)); } filterTextField.requestFocus(); filterTextField.setText(filterTextField.getText().trim()); filterTextField.positionCaret(filterTextField.getLength()); } private void setUpEventHandler() { this.setOnMouseClicked(e-> { ui.triggerEvent(new PanelClickedEvent(this.panelIndex)); requestFocus(); }); } private final ModelUpdatedEventHandler onModelUpdate = e -> { // Update keywords List<String> all = new ArrayList<>(Qualifier.KEYWORDS); all.addAll(e.model.getUsers().stream() .map(TurboUser::getLoginName) .collect(Collectors.toList())); filterTextField.setKeywords(all); }; private Node createFilterBox() { filterTextField = 
new FilterTextField("", 0) .setOnConfirm((text) -> { applyStringFilter(text); return text; }) .setOnCancel(this::requestFocus); filterTextField.setId(model.getDefaultRepo() + "_col" + panelIndex + "_filterTextField"); filterTextField.setMinWidth(388); filterTextField.setMaxWidth(388); ui.registerEvent(onModelUpdate); filterTextField.setOnMouseClicked(e -> ui.triggerEvent(new PanelClickedEvent(panelIndex))); HBox layout = new HBox(); layout.getChildren().addAll(filterTextField); layout.setPadding(new Insets(0, 0, 3, 0)); setupPanelDragEvents(layout); return layout; } private void setupPanelDragEvents(Node dropNode) { dropNode.setOnDragEntered(e -> { if (parentPanelControl.getCurrentlyDraggedPanelIndex() != panelIndex) { // Apparently the dragboard can't be updated while // the drag is in progress. This is why we use an // external source for updates. assert parentPanelControl.getCurrentlyDraggedPanelIndex() != -1; int previous = parentPanelControl.getCurrentlyDraggedPanelIndex(); parentPanelControl.setCurrentlyDraggedPanelIndex(panelIndex); parentPanelControl.swapPanels(previous, panelIndex); } e.consume(); } ); dropNode.setOnDragExited(e -> { dropNode.getStyleClass().remove("dragged-over"); e.consume(); }); } // These two methods are triggered by the contents of the input area // changing. As such they should not be invoked manually, or the input // area won't update. private void applyStringFilter(String filterString) { try { FilterExpression filter = Parser.parse(filterString); if (filter != null) { this.applyFilterExpression(filter); } else { this.applyFilterExpression(Qualifier.EMPTY); } } catch (ParseException ex) { this.applyFilterExpression(Qualifier.EMPTY); // Overrides message in status bar UI.status.displayMessage("Panel " + (panelIndex + 1) + ": Parse error in filter: " + ex.getMessage()); } } /** * Triggered after pressing ENTER in the filter box. * * @param filter The current filter text in the filter box. */ private void applyFilterExpression(FilterExpression filter) { currentFilterExpression = filter; parentPanelControl.getGUIController().panelFilterExpressionChanged(this); } public void setFilterByString(String filterString) { filterTextField.setFilterText(filterString); } public FilterExpression getCurrentFilterExpression() { return currentFilterExpression; } public void restorePanel(String name, String filterString) { filterTextField.setFilterText(filterString); panelMenuBar.setPanelName(name); } public void startRename(){ panelMenuBar.initRenameableTextFieldAndEvents(); } public void setPanelName(String newName) { panelMenuBar.setPanelName(newName); } public PanelInfo getCurrentInfo() { return new PanelInfo(this.panelMenuBar.getPanelName(), filterTextField.getText()); } public ObservableList<TurboIssue> getIssueList() { return issuesToDisplay; } public Text getNameText() { return this.panelMenuBar.getNameText(); } public FilterTextField getFilterTextField() { return this.filterTextField; } public Label getRenameButton() { return this.panelMenuBar.getRenameButton(); } public Label getCloseButton() { return this.panelMenuBar.getCloseButton(); } public void setIssueList(List<TurboIssue> transformedIssueList) { this.issuesToDisplay = FXCollections.observableArrayList(transformedIssueList); } public void updatePanel(List<TurboIssue> filteredAndSortedIssues) { setIssueList(filteredAndSortedIssues); refreshItems(); } @Override public void close() { ui.unregisterEvent(onModelUpdate); } }
package viewer; import mpicbg.spim.data.SequenceDescription; import net.imglib2.RandomAccessible; import net.imglib2.RandomAccessibleInterval; import net.imglib2.RealRandomAccessible; import net.imglib2.interpolation.InterpolatorFactory; import net.imglib2.interpolation.randomaccess.NLinearInterpolatorFactory; import net.imglib2.interpolation.randomaccess.NearestNeighborInterpolatorFactory; import net.imglib2.realtransform.AffineTransform3D; import net.imglib2.type.numeric.NumericType; import viewer.render.Interpolation; import viewer.render.Source; public abstract class AbstractSpimSource< T extends NumericType< T > > implements Source< T > { protected int currentTimepoint; protected RandomAccessibleInterval< T >[] currentSources; protected RealRandomAccessible< T >[][] currentInterpolatedSources; protected final AffineTransform3D[] currentSourceTransforms; protected final int setup; protected final String name; protected final SequenceViewsLoader sequenceViews; protected final ViewerImgLoader imgLoader; protected final int numTimepoints; protected final int numMipmapLevels; protected final static int numInterpolationMethods = 2; protected final static int iNearestNeighborMethod = 0; protected final static int iNLinearMethod = 1; protected final InterpolatorFactory< T, RandomAccessible< T > >[] interpolatorFactories; @SuppressWarnings( "unchecked" ) public AbstractSpimSource( final SequenceViewsLoader loader, final int setup, final String name ) { this.setup = setup; this.name = name; this.sequenceViews = loader; final SequenceDescription seq = loader.getSequenceDescription(); imgLoader = ( ViewerImgLoader ) seq.imgLoader; numTimepoints = seq.numTimepoints(); numMipmapLevels = imgLoader.numMipmapLevels( setup ); currentSources = new RandomAccessibleInterval[ numMipmapLevels ]; currentInterpolatedSources = new RealRandomAccessible[ numMipmapLevels ][ 2 ]; currentSourceTransforms = new AffineTransform3D[ numMipmapLevels ]; for ( int level = 0; level < numMipmapLevels; level++ ) currentSourceTransforms[ level ] = new AffineTransform3D(); interpolatorFactories = new InterpolatorFactory[ numInterpolationMethods ]; interpolatorFactories[ iNearestNeighborMethod ] = new NearestNeighborInterpolatorFactory< T >(); interpolatorFactories[ iNLinearMethod ] = new NLinearInterpolatorFactory< T >(); loadTimepoint( 0 ); } protected abstract void loadTimepoint( final int timepoint ); @Override public boolean isPresent( final int t ) { return t >= 0 && t < numTimepoints; } @Override public synchronized RandomAccessibleInterval< T > getSource( final int t, final int level ) { if ( t != currentTimepoint ) loadTimepoint( t ); return currentSources[ level ]; } @Override public synchronized RealRandomAccessible< T > getInterpolatedSource( final int t, final int level, final Interpolation method ) { if ( t != currentTimepoint ) loadTimepoint( t ); return currentInterpolatedSources[ level ][ method == Interpolation.NLINEAR ? iNLinearMethod : iNearestNeighborMethod ]; } @Override public synchronized AffineTransform3D getSourceTransform( final int t, final int level ) { if ( t != currentTimepoint ) loadTimepoint( t ); return currentSourceTransforms[ level ]; } @Override public String getName() { return name; } @Override public int getNumMipmapLevels() { return numMipmapLevels; } }
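/*
 * A library-free sketch of the caching pattern AbstractSpimSource relies on: data for the
 * current timepoint is kept in per-mipmap-level arrays and is reloaded lazily whenever a
 * different timepoint is requested. The type and method names below are placeholders and
 * deliberately avoid the imglib2/viewer API.
 */
abstract class CachedTimepointSourceSketch {

    private int currentTimepoint = -1;
    private final Object[] currentLevels; // stands in for the per-level RandomAccessibleInterval array
    private final int numMipmapLevels;

    CachedTimepointSourceSketch(final int numMipmapLevels) {
        this.numMipmapLevels = numMipmapLevels;
        this.currentLevels = new Object[numMipmapLevels];
    }

    /** Loads one mipmap level of one timepoint; the real class builds images, interpolants and transforms here. */
    protected abstract Object load(int timepoint, int level);

    /** Mirrors getSource(t, level): all levels are reloaded only when the requested timepoint changes. */
    synchronized Object getSource(final int timepoint, final int level) {
        if (timepoint != currentTimepoint) {
            for (int l = 0; l < numMipmapLevels; l++) {
                currentLevels[l] = load(timepoint, l);
            }
            currentTimepoint = timepoint;
        }
        return currentLevels[level];
    }
}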
package mondrian.olap.fun; import mondrian.olap.*; import mondrian.olap.type.*; import mondrian.olap.type.DimensionType; import mondrian.resource.MondrianResource; import java.util.*; import java.io.PrintWriter; /** * Abstract implementation of {@link FunTable}. * * <p>The derived class must implement {@link #defineFunctions()} to define * each function which will be recognized by this table. This method is called * from the constructor, after which point, no further functions can be added. */ public abstract class FunTableImpl implements FunTable { /** * Maps the upper-case name of a function plus its * {@link mondrian.olap.Syntax} to an array of * {@link mondrian.olap.Validator} objects for that name. */ protected final Map mapNameToResolvers = new HashMap(); private final HashSet reservedWords = new HashSet(); private final HashSet propertyWords = new HashSet(); protected static final Resolver[] emptyResolverArray = new Resolver[0]; /** used during initialization **/ protected final List resolvers = new ArrayList(); protected final List funInfoList = new ArrayList(); protected FunTableImpl() { } /** * Initializes the function table. */ public void init() { defineFunctions(); organizeFunctions(); } protected static String makeResolverKey(String name, Syntax syntax) { return name.toUpperCase() + "$" + syntax; } protected void define(FunDef funDef) { define(new SimpleResolver(funDef)); } protected void define(Resolver resolver) { addFunInfo(resolver); if (resolver.getSyntax() == Syntax.Property) { defineProperty(resolver.getName()); } resolvers.add(resolver); final String[] reservedWords = resolver.getReservedWords(); for (int i = 0; i < reservedWords.length; i++) { String reservedWord = reservedWords[i]; defineReserved(reservedWord); } } protected void addFunInfo(Resolver resolver) { this.funInfoList.add(FunInfo.make(resolver)); } public Exp createValueFunCall(Exp exp, Validator validator) { final Type type = exp.getTypeX(); if (type instanceof ScalarType) { return exp; } if (!TypeUtil.canEvaluate(type)) { String exprString = Util.unparse(exp); throw MondrianResource.instance().MdxMemberExpIsSet.ex(exprString); } if (type instanceof MemberType) { return new MemberScalarExp(exp); } else if (type instanceof DimensionType || type instanceof HierarchyType) { exp = new FunCall( "CurrentMember", Syntax.Property, new Exp[]{exp}); exp = exp.accept(validator); return new MemberScalarExp(exp); } else if (type instanceof TupleType) { if (exp instanceof FunCall) { FunCall call = (FunCall) exp; if (call.getFunDef() instanceof TupleFunDef) { return new MemberListScalarExp(call.getArgs()); } } return new TupleScalarExp(exp); } else { throw Util.newInternal("Unknown type " + type); } } /** * Creates an expression which will yield the current value of the current * measure. */ static Exp createValueFunCall() { return new ScalarExp(); } public FunDef getDef(FunCall call, Validator validator) { String key = makeResolverKey(call.getFunName(), call.getSyntax()); // Resolve function by its upper-case name first. If there is only one // function with that name, stop immediately. If there is more than // function, use some custom method, which generally involves looking // at the type of one of its arguments. 
String signature = call.getSyntax().getSignature(call.getFunName(), Category.Unknown, ExpBase.getTypes(call.getArgs())); Resolver[] resolvers = (Resolver[]) mapNameToResolvers.get(key); if (resolvers == null) { resolvers = emptyResolverArray; } int[] conversionCount = new int[] {0}; int minConversions = Integer.MAX_VALUE; int matchCount = 0; FunDef matchDef = null; for (int i = 0; i < resolvers.length; i++) { conversionCount[0] = 0; FunDef def = resolvers[i].resolve( call.getArgs(), validator, conversionCount); if (def != null) { int conversions = conversionCount[0]; if (conversions < minConversions) { minConversions = conversions; matchCount = 1; matchDef = def; } else if (conversions == minConversions) { matchCount++; } else { // ignore this match -- it required more coercions than // other overloadings we've seen } } } switch (matchCount) { case 0: throw MondrianResource.instance().NoFunctionMatchesSignature.ex( signature); case 1: final String matchKey = makeResolverKey(matchDef.getName(), matchDef.getSyntax()); Util.assertTrue(matchKey.equals(key), matchKey); return matchDef; default: throw MondrianResource.instance().MoreThanOneFunctionMatchesSignature.ex(signature); } } public boolean requiresExpression( FunCall call, int k, Validator validator) { final FunDef funDef = call.getFunDef(); if (funDef != null) { final int[] parameterTypes = funDef.getParameterTypes(); return parameterTypes[k] != Category.Set; } // The function call has not been resolved yet. In fact, this method // may have been invoked while resolving the child. Consider this: // CrossJoin([Measures].[Unit Sales] * [Measures].[Store Sales]) // In order to know whether to resolve '*' to the multiplication // operator (which returns a scalar) or the crossjoin operator (which // returns a set) we have to know what kind of expression is expected. String key = makeResolverKey(call.getFunName(), call.getSyntax()); Resolver[] resolvers = (Resolver[]) mapNameToResolvers.get(key); if (resolvers == null) { resolvers = emptyResolverArray; } for (int i = 0; i < resolvers.length; i++) { Resolver resolver2 = resolvers[i]; if (!resolver2.requiresExpression(k)) { // This resolver accepts a set in this argument position, // therefore we don't REQUIRE a scalar expression. return false; } } return true; } public List getReservedWords() { return new ArrayList(reservedWords); } public boolean isReserved(String s) { return reservedWords.contains(s.toUpperCase()); } /** * Defines a reserved word. * @see #isReserved */ protected void defineReserved(String s) { reservedWords.add(s.toUpperCase()); } public List getResolvers() { final List list = new ArrayList(); final Collection c = mapNameToResolvers.values(); for (Iterator iterator = c.iterator(); iterator.hasNext();) { Resolver[] resolvers = (Resolver[]) iterator.next(); for (int i = 0; i < resolvers.length; i++) { Resolver resolver = resolvers[i]; list.add(resolver); } } return list; } public boolean isProperty(String s) { return propertyWords.contains(s.toUpperCase()); } /** * Defines a word matching a property function name. * @see #isProperty */ protected void defineProperty(String s) { propertyWords.add(s.toUpperCase()); } public List getFunInfoList() { return Collections.unmodifiableList(this.funInfoList); } /** * Indexes the collection of functions. */ protected void organizeFunctions() { Collections.sort(funInfoList); // Map upper-case function names to resolvers. 
for (int i = 0, n = resolvers.size(); i < n; i++) { Resolver resolver = (Resolver) resolvers.get(i); String key = makeResolverKey(resolver.getName(), resolver.getSyntax()); final Object value = mapNameToResolvers.get(key); if (value instanceof Resolver[]) { continue; // has already been converted } List v2 = (List) value; if (v2 == null) { v2 = new ArrayList(); mapNameToResolvers.put(key, v2); } v2.add(resolver); } // Convert the Lists into arrays. for (Iterator keys = mapNameToResolvers.keySet().iterator(); keys.hasNext();) { String key = (String) keys.next(); final Object value = mapNameToResolvers.get(key); if (value instanceof Resolver[]) { continue; // has already been converted } List v2 = (List) value; mapNameToResolvers.put(key, v2.toArray(new Resolver[v2.size()])); } } /** * This method is called from the constructor, to define the set of * functions and reserved words recognized. * * <p>Each function is declared by calling {@link #define}. Each reserved * word is declared by calling {@link #defineReserved(String)}. * * <p>Derived class can override this method to add more functions. **/ protected abstract void defineFunctions(); /** * Wrapper which evaluates an expression to a tuple, sets the current * context from that tuple, and converts it to a scalar expression. */ public static class TupleScalarExp extends ExpBase { private final Exp exp; public TupleScalarExp(Exp exp) { this.exp = exp; assert exp.getTypeX() instanceof TupleType; } public Object[] getChildren() { return new Object[] {exp}; } public void unparse(PrintWriter pw) { exp.unparse(pw); } public Object clone() { return this; } public int getCategory() { return exp.getCategory(); } public Type getTypeX() { return new ScalarType(); } public Exp accept(Validator validator) { final Exp exp2 = validator.validate(exp, false); if (exp2 == exp) { //return this; } final FunTable funTable = validator.getFunTable(); return funTable.createValueFunCall(exp2, validator); } public boolean dependsOn(Dimension dimension) { // The value at the current context by definition depends upon // all dimensions. return true; } public Object evaluate(Evaluator evaluator) { return exp.evaluateScalar(evaluator); } } /** * Wrapper which evaluates an expression to a dimensional context and * converts it to a scalar expression. */ public static class MemberScalarExp extends ExpBase { private final Exp exp; public MemberScalarExp(Exp exp) { this.exp = exp; } public Object[] getChildren() { return new Object[] {exp}; } public void unparse(PrintWriter pw) { exp.unparse(pw); } public Object clone() { return this; } public int getCategory() { return exp.getCategory(); } public Type getTypeX() { return new ScalarType(); } public Exp accept(Validator validator) { final Exp exp2 = validator.validate(exp, false); if (exp2 == exp) { return this; } final FunTable funTable = validator.getFunTable(); return funTable.createValueFunCall(exp2, validator); } public boolean dependsOn(Dimension dimension) { // If the expression has type dimension // but does not depend on dimension // then this expression does not dimension on dimension. // Otherwise it depends on everything. 
final Type type = exp.getTypeX(); if (type.usesDimension(dimension)) { return exp.dependsOn(dimension); } else { return true; } } public Object evaluate(Evaluator evaluator) { final Member member = (Member) exp.evaluate(evaluator); if (member == null || member.isNull()) { return null; } Member old = evaluator.setContext(member); Object value = evaluator.evaluateCurrent(); evaluator.setContext(old); return value; } } /** * An expression which yields the current value of the current member. */ public static class ScalarExp extends ExpBase { public ScalarExp() { } public void unparse(PrintWriter pw) { pw.print("$Value()"); } public Object clone() { return this; } public int getCategory() { return Category.Numeric; } public Type getTypeX() { return new NumericType(); } public Exp accept(Validator validator) { return this; } public boolean dependsOn(Dimension dimension) { // The value at the current context by definition depends upon // all dimensions. return true; } public Object evaluate(Evaluator evaluator) { return evaluator.evaluateCurrent(); } } /** * An expression which evaluates a list of members, sets the context to * these members, then evaluates the current measure as a scalar * expression. * * <p>A typical expression which would be evaluated in this way is: * <blockquote><code>WITH MEMBER [Measures].[Female Sales] AS * ' ( [Measures].[Unit Sales], [Gender].[F] ) '</code></blockquote> * * @see TupleScalarExp */ public static class MemberListScalarExp extends ExpBase { private final Exp[] exps; public MemberListScalarExp(Exp[] exps) { this.exps = exps; for (int i = 0; i < exps.length; i++) { assert exps[i].getTypeX() instanceof MemberType; } } public void unparse(PrintWriter pw) { unparseList(pw, exps, "(", ", ", ")"); } public Object[] getChildren() { return exps; } public Object clone() { return this; } public int getCategory() { return Category.Numeric; } public Type getTypeX() { return new NumericType(); } public Exp accept(Validator validator) { return this; } public boolean dependsOn(Dimension dimension) { // This expression depends upon dimension // if none of the sub-expressions returns a member of dimension // or if one of the sub-expressions is dependent upon dimension. // Examples: // ( [Gender].[M], [Marital Status].CurrentMember ) // does not depend upon [Gender], because one of the members is // of the [Gender] dimension, yet none of the expressions depends // upon [Gender]. // ( [Store].[USA], [Marital Status].CurrentMember ) // depends upon [Gender], because none of the members is of // the [Gender] dimension. boolean uses = false; for (int i = 0; i < exps.length; i++) { Exp exp = exps[i]; if (exp.dependsOn(dimension)) { return true; } final Type type = exp.getTypeX(); if (type.usesDimension(dimension)) { uses = true; } } return !uses; } public Object evaluate(Evaluator evaluator) { Evaluator evaluator2 = evaluator.push(); for (int i = 0; i < exps.length; i++) { Exp exp = exps[i]; final Member member = (Member) exp.evaluate(evaluator); evaluator2.setContext(member); } return evaluator2.evaluateCurrent(); } } } // End FunTableImpl.java
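/*
 * A standalone illustration (not Mondrian code) of the two ideas FunTableImpl relies on:
 * resolver lookup keys of the form NAME$SYNTAX, and picking the overload that needs the
 * fewest implicit argument conversions, with ties treated as ambiguous. All names below
 * are hypothetical.
 */
class OverloadResolutionSketch {

    /** Same shape as FunTableImpl.makeResolverKey: upper-cased function name, '$', syntax. */
    static String makeKey(String name, String syntax) {
        return name.toUpperCase() + "$" + syntax;
    }

    /**
     * Returns the index of the candidate needing the fewest conversions, or -1 on a tie or
     * when nothing resolves. A negative count stands for "does not resolve", mirroring a
     * resolver that returns no FunDef.
     */
    static int pickOverload(int[] conversionCounts) {
        int best = -1;
        int minConversions = Integer.MAX_VALUE;
        int matches = 0;
        for (int i = 0; i < conversionCounts.length; i++) {
            if (conversionCounts[i] < 0) {
                continue;
            }
            if (conversionCounts[i] < minConversions) {
                minConversions = conversionCounts[i];
                best = i;
                matches = 1;
            } else if (conversionCounts[i] == minConversions) {
                matches++;
            }
        }
        return matches == 1 ? best : -1;
    }

    public static void main(String[] args) {
        System.out.println(makeKey("CrossJoin", "Function")); // CROSSJOIN$Function
        System.out.println(pickOverload(new int[] {2, 0, -1})); // 1: the exact match wins
    }
}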
package speedtyper.service.impl; import java.util.List; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; import speedtyper.dao.RoomDao; import speedtyper.model.RoomModel; import speedtyper.model.UserModel; import speedtyper.service.RoomService; @Service public class RoomServiceImpl implements RoomService { @Autowired private RoomDao roomDao; @Transactional public void add(RoomModel room) { this.roomDao.add(room); } @Transactional public void update(RoomModel room) { this.roomDao.update(room); } @Transactional public void delete(int roomId) { this.roomDao.delete(roomId); } @Transactional public RoomModel getRoom(int roomId) { return this.roomDao.getRoom(roomId); } @Transactional public List<RoomModel> getAllRooms() { return this.roomDao.getAllRooms(); } @Transactional public List<RoomModel> getAvaibleRooms() { return this.roomDao.getAvaibleRooms(); } }
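/*
 * A hypothetical consumer of the RoomService above, showing the usual injection of the
 * @Service bean. This class is not part of the original project; the package and class
 * names are placeholders, and it assumes the RoomService interface declares the same
 * methods implemented above (including getAvaibleRooms(), spelled as in the service layer).
 */
package speedtyper.example;

import java.util.List;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

import speedtyper.model.RoomModel;
import speedtyper.service.RoomService;

@Component
public class RoomOverviewSketch {

    private final RoomService roomService;

    @Autowired
    public RoomOverviewSketch(RoomService roomService) {
        this.roomService = roomService;
    }

    /** Returns the number of rooms currently reported as available by the service. */
    public int countAvailableRooms() {
        List<RoomModel> rooms = roomService.getAvaibleRooms();
        return rooms.size();
    }
}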
package com.remondis.remap; import static org.assertj.core.api.Assertions.assertThatThrownBy; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNull; import java.util.Arrays; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Set; import org.junit.Test; import com.remondis.remap.test.MapperTests.PersonWithAddress; import com.remondis.remap.test.MapperTests.PersonWithFoo; public class MapperTest { public static final String MORE_IN_A = "moreInA"; public static final Long ZAHL_IN_A = -88L; public static final Integer B_INTEGER = -999; public static final int B_NUMBER = 222; public static final String B_STRING = "b string"; public static final Integer INTEGER = 310; public static final int NUMBER = 210; public static final String STRING = "a string"; @Test(expected = MappingException.class) public void shouldDenyMapNull() { Mapper<A, AResource> mapper = Mapping.from(A.class) .to(AResource.class) .reassign(A::getMoreInA) .to(AResource::getMoreInAResource) .reassign(A::getZahlInA) .to(AResource::getZahlInAResource) .useMapper(Mapping.from(B.class) .to(BResource.class) .mapper()) .mapper(); mapper.map((A) null); } @Test public void shouldFailDueToNoRegisteredMapper() { assertThatThrownBy(() -> Mapping.from(A.class) .to(AResource.class) .reassign(A::getMoreInA) .to(AResource::getMoreInAResource) .reassign(A::getZahlInA) .to(AResource::getZahlInAResource) .mapper()).isInstanceOf(MappingException.class) .hasMessageStartingWith("No mapper found for type mapping"); } /** * This is the happy-path test for mapping {@link A} to {@link AResource} with a nested mapping. This test does not * check the inherited fields. */ @Test public void shouldMapCorrectly() { Mapper<A, AResource> mapper = Mapping.from(A.class) .to(AResource.class) .omitInSource(A::getMoreInA) .omitInDestination(AResource::getMoreInAResource) .reassign(A::getZahlInA) .to(AResource::getZahlInAResource) .useMapper(Mapping.from(B.class) .to(BResource.class) .mapper()) .mapper(); B b = new B(B_STRING, B_NUMBER, B_INTEGER); A a = new A(MORE_IN_A, STRING, NUMBER, INTEGER, ZAHL_IN_A, b); a.setZahlInA(ZAHL_IN_A); AResource ar = mapper.map(a); assertNull(ar.getMoreInAResource()); assertEquals(STRING, a.getString()); assertEquals(STRING, ar.getString()); assertEquals(NUMBER, a.getNumber()); assertEquals(NUMBER, ar.getNumber()); assertEquals(INTEGER, a.getInteger()); assertEquals(INTEGER, ar.getInteger()); assertEquals(ZAHL_IN_A, a.getZahlInA()); assertEquals(ZAHL_IN_A, ar.getZahlInAResource()); BResource br = ar.getB(); assertEquals(B_STRING, b.getString()); assertEquals(B_STRING, br.getString()); assertEquals(B_NUMBER, b.getNumber()); assertEquals(B_NUMBER, br.getNumber()); assertEquals(B_INTEGER, b.getInteger()); assertEquals(B_INTEGER, br.getInteger()); } /** * Ensures that the {@link Mapper} detects one more property in the source object that is not omitted by the mapping * configuration. The {@link Mapper} is expected to throw a {@link MappingException}. */ @Test(expected = MappingException.class) public void oneMoreSourceFieldInA() { Mapping.from(AWithOneMoreSourceField.class) .to(AResourceWithOneMoreSourceField.class) .mapper(); } /** * Ensures that an unmatched source field is omitted. 
*/ @Test public void oneMoreSourceFieldInAButItIsOmitted() { Mapper<AWithOneMoreSourceField, AResourceWithOneMoreSourceField> mapper = Mapping .from(AWithOneMoreSourceField.class) .to(AResourceWithOneMoreSourceField.class) .omitInSource(a -> a.getOnlyInA()) .mapper(); AWithOneMoreSourceField aWithOneMoreSourceField = new AWithOneMoreSourceField(1, 10, "text"); AResourceWithOneMoreSourceField map = mapper.map(aWithOneMoreSourceField); assertEquals(aWithOneMoreSourceField.getText(), map.getText()); assertEquals(aWithOneMoreSourceField.getZahl(), map.getZahl()); } /** * Ensures that the {@link Mapper} detects one more property in the destination object that is not omitted by the * mapping * configuration. The {@link Mapper} is expected to throw a {@link MappingException}. */ @Test(expected = MappingException.class) public void oneMoreDestinationFieldInAResource() { Mapping.from(AWithOneMoreDestinationField.class) .to(AResourceWithOneMoreDestinationField.class) .mapper(); } /** * Ensures that an unmatched destination field is omitted. */ @Test public void oneMoreDestinationFieldInAResourceButItsOmmited() { Mapper<AWithOneMoreDestinationField, AResourceWithOneMoreDestinationField> mapper = Mapping .from(AWithOneMoreDestinationField.class) .to(AResourceWithOneMoreDestinationField.class) .omitInDestination(ar -> ar.getOnlyInAResource()) .mapper(); AWithOneMoreDestinationField aWithOneMoreDestinationField = new AWithOneMoreDestinationField(10, "text"); AResourceWithOneMoreDestinationField map = mapper.map(aWithOneMoreDestinationField); assertEquals(aWithOneMoreDestinationField.getText(), map.getText()); assertEquals(aWithOneMoreDestinationField.getZahl(), map.getZahl()); } /** * Ensures that the mapper performs a correct reassigment of fields. */ @Test public void reassign() { Mapper<AReassign, AResourceReassign> mapper = Mapping.from(AReassign.class) .to(AResourceReassign.class) .reassign(AReassign::getFirstNumberInA) .to(AResourceReassign::getFirstNumberInAResource) .reassign(AReassign::getSecondNumberInA) .to(AResourceReassign::getSecondNumberInAResource) .mapper(); AReassign aReassgin = new AReassign(1, 2, 3); AResourceReassign map = mapper.map(aReassgin); assertEquals(aReassgin.getZahl(), map.getZahl()); assertEquals(aReassgin.getFirstNumberInA(), map.getFirstNumberInAResource()); assertEquals(aReassgin.getSecondNumberInA(), map.getSecondNumberInAResource()); } /** * Ensures that the mapper does not allow an omitted field in the source to be reassigned. */ @Test(expected = MappingException.class) public void reassignAnOmmitedFieldInSource() { Mapping.from(AReassign.class) .to(AResourceReassign.class) .omitInSource(AReassign::getFirstNumberInA) .reassign(AReassign::getFirstNumberInA) .to(AResourceReassign::getFirstNumberInAResource) .reassign(AReassign::getSecondNumberInA) .to(AResourceReassign::getSecondNumberInAResource) .mapper(); } /** * Ensures that the mapper does not allow an omitted field in the destination to be reassigned. */ @Test(expected = MappingException.class) public void reassignToAnOmmitedFieldInDestination() { Mapping.from(AReassign.class) .to(AResourceReassign.class) .omitInDestination(ar -> ar.getFirstNumberInAResource()) .reassign(AReassign::getFirstNumberInA) .to(AResourceReassign::getFirstNumberInAResource) .reassign(AReassign::getSecondNumberInA) .to(AResourceReassign::getSecondNumberInAResource) .mapper(); } /** * Ensures that the mapper detects an unmapped field in the destination while the all source fields are mapped. 
*/ @Test(expected = MappingException.class) public void reassignAndOneDestinationFieldIsUnmapped() { Mapping.from(AReassign.class) .to(AResourceReassign.class) .reassign(AReassign::getFirstNumberInA) .to(AResourceReassign::getSecondNumberInAResource) .omitInSource(AReassign::getSecondNumberInA) .mapper(); } @SuppressWarnings("rawtypes") @Test public void shouldMapToNewList() { Mapper<A, AResource> mapper = Mapping.from(A.class) .to(AResource.class) .omitInSource(A::getMoreInA) .omitInDestination(AResource::getMoreInAResource) .reassign(A::getZahlInA) .to(AResource::getZahlInAResource) .useMapper(Mapping.from(B.class) .to(BResource.class) .mapper()) .mapper(); B b = new B(B_STRING, B_NUMBER, B_INTEGER); A a = new A(MORE_IN_A, STRING, NUMBER, INTEGER, ZAHL_IN_A, b); a.setZahlInA(ZAHL_IN_A); A[] aarr = new A[] { a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a }; List<A> aList = Arrays.asList(aarr); List<AResource> arCollection = mapper.map(aList); // Make sure this is a new collection assertFalse((List) aList == (List) arCollection); assertEquals(aarr.length, aList.size()); assertEquals(aarr.length, arCollection.size()); for (AResource ar : arCollection) { assertNull(ar.getMoreInAResource()); assertEquals(STRING, a.getString()); assertEquals(STRING, ar.getString()); assertEquals(NUMBER, a.getNumber()); assertEquals(NUMBER, ar.getNumber()); assertEquals(INTEGER, a.getInteger()); assertEquals(INTEGER, ar.getInteger()); assertEquals(ZAHL_IN_A, a.getZahlInA()); assertEquals(ZAHL_IN_A, ar.getZahlInAResource()); BResource br = ar.getB(); assertEquals(B_STRING, b.getString()); assertEquals(B_STRING, br.getString()); assertEquals(B_NUMBER, b.getNumber()); assertEquals(B_NUMBER, br.getNumber()); assertEquals(B_INTEGER, b.getInteger()); assertEquals(B_INTEGER, br.getInteger()); } } @SuppressWarnings("rawtypes") @Test public void shouldMapToNewSet() { Mapper<A, AResource> mapper = Mapping.from(A.class) .to(AResource.class) .omitInSource(A::getMoreInA) .omitInDestination(AResource::getMoreInAResource) .reassign(A::getZahlInA) .to(AResource::getZahlInAResource) .useMapper(Mapping.from(B.class) .to(BResource.class) .mapper()) .mapper(); int max = 10; A[] aarr = new A[max]; for (int i = 0; i < max; i++) { B b = new B(B_STRING, B_NUMBER, B_INTEGER); A a = new A(MORE_IN_A, STRING, NUMBER, INTEGER, ZAHL_IN_A, b); a.setZahlInA(ZAHL_IN_A); aarr[i] = a; } Set<A> aList = new HashSet<>(Arrays.asList(aarr)); Set<AResource> arCollection = mapper.map(aList); // Make sure this is a new collection assertFalse((Set) aList == (Set) arCollection); assertEquals(max, aList.size()); assertEquals(max, arCollection.size()); Iterator<A> as = aList.iterator(); Iterator<AResource> ars = arCollection.iterator(); while (as.hasNext()) { A a = as.next(); AResource ar = ars.next(); assertNull(ar.getMoreInAResource()); assertEquals(STRING, a.getString()); assertEquals(STRING, ar.getString()); assertEquals(NUMBER, a.getNumber()); assertEquals(NUMBER, ar.getNumber()); assertEquals(INTEGER, a.getInteger()); assertEquals(INTEGER, ar.getInteger()); assertEquals(ZAHL_IN_A, a.getZahlInA()); assertEquals(ZAHL_IN_A, ar.getZahlInAResource()); B b = a.getB(); BResource br = ar.getB(); assertEquals(B_STRING, b.getString()); assertEquals(B_STRING, br.getString()); assertEquals(B_NUMBER, b.getNumber()); assertEquals(B_NUMBER, br.getNumber()); assertEquals(B_INTEGER, b.getInteger()); assertEquals(B_INTEGER, br.getInteger()); } } @Test public void shouldDenyIllegalArguments() { assertThatThrownBy(() -> { Mapping.from(null); 
}).isInstanceOf(IllegalArgumentException.class) .hasNoCause(); assertThatThrownBy(() -> { Mapping.from(A.class) .to(null); }).isInstanceOf(IllegalArgumentException.class) .hasNoCause(); assertThatThrownBy(() -> { Mapping.from(A.class) .to(AResource.class) .omitInSource(null); }).isInstanceOf(IllegalArgumentException.class) .hasNoCause(); assertThatThrownBy(() -> { Mapping.from(A.class) .to(AResource.class) .omitInSource(A::getMoreInA) .omitInDestination(null); }).isInstanceOf(IllegalArgumentException.class) .hasNoCause(); assertThatThrownBy(() -> { Mapping.from(A.class) .to(AResource.class) .omitInSource(A::getMoreInA) .omitInDestination(AResource::getMoreInAResource) .reassign(null); }).isInstanceOf(IllegalArgumentException.class) .hasNoCause(); assertThatThrownBy(() -> { Mapping.from(A.class) .to(AResource.class) .omitInSource(A::getMoreInA) .omitInDestination(AResource::getMoreInAResource) .reassign(A::getZahlInA) .to(null); }).isInstanceOf(IllegalArgumentException.class) .hasNoCause(); assertThatThrownBy(() -> { Mapping.from(A.class) .to(AResource.class) .omitInSource(A::getMoreInA) .omitInDestination(AResource::getMoreInAResource) .reassign(A::getZahlInA) .to(AResource::getZahlInAResource) .useMapper((Mapper<?, ?>) null); }).isInstanceOf(IllegalArgumentException.class) .hasNoCause(); // Perform the API test on replace assertThatThrownBy(() -> { Mapping.from(PersonWithAddress.class) .to(PersonWithFoo.class) .replace(null, PersonWithFoo::getFoo); }).isInstanceOf(IllegalArgumentException.class) .hasNoCause(); assertThatThrownBy(() -> { Mapping.from(PersonWithAddress.class) .to(PersonWithFoo.class) .replace(PersonWithAddress::getAddress, null); }).isInstanceOf(IllegalArgumentException.class) .hasNoCause(); assertThatThrownBy(() -> { Mapping.from(PersonWithAddress.class) .to(PersonWithFoo.class) .replace(PersonWithAddress::getAddress, PersonWithFoo::getFoo) .with(null); }).isInstanceOf(IllegalArgumentException.class) .hasNoCause(); } }
package com.remondis.remap; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNull; import java.util.Arrays; import java.util.List; import org.junit.Test; import com.remondis.remap.inheritance.Child; import com.remondis.remap.inheritance.ChildResource; public class MapperTest { private static final String MORE_IN_A = "moreInA"; private static final Long ZAHL_IN_A = -88L; private static final Integer B_INTEGER = -999; private static final int B_NUMBER = 222; private static final String B_STRING = "b string"; private static final Integer INTEGER = 310; private static final int NUMBER = 210; private static final String STRING = "a string"; @Test(expected = MappingException.class) public void shouldDenyMapNull() { Mapper<A, AResource> mapper = Mapping.from(A.class) .to(AResource.class) .reassign(A::getMoreInA) .to(AResource::getMoreInAResource) .reassign(A::getZahlInA) .to(AResource::getZahlInAResource) .useMapper(Mapping.from(B.class) .to(BResource.class) .mapper()) .mapper(); mapper.map((A) null); } /** * Ensures that the mapper maps inherited field correctly. */ @Test public void shouldMapInheritedFields() { Mapper<Child, ChildResource> map = Mapping.from(Child.class) .to(ChildResource.class) .omitInSource(Child::getMoreInParent) .omitInDestination(ChildResource::getMoreInParentResource) .useMapper(Mapping.from(B.class) .to(BResource.class) .mapper()) .mapper(); B b = new B(B_STRING, B_NUMBER, B_INTEGER); Object shouldNotMap = new Object(); Object object = new Object(); int zahl = 11; Child child = new Child(shouldNotMap, STRING, b, object, zahl); ChildResource cr = map.map(child); assertNull(cr.getMoreInParentResource()); assertEquals(STRING, child.getString()); assertEquals(STRING, cr.getString()); assertEquals(object, child.getObject()); assertEquals(object, cr.getObject()); assertEquals(zahl, child.getZahl()); assertEquals(zahl, cr.getZahl()); BResource br = cr.getB(); assertEquals(B_STRING, b.getString()); assertEquals(B_STRING, br.getString()); assertEquals(B_NUMBER, b.getNumber()); assertEquals(B_NUMBER, br.getNumber()); assertEquals(B_INTEGER, b.getInteger()); assertEquals(B_INTEGER, br.getInteger()); } /** * This is the happy-path test for mapping {@link A} to {@link AResource} with a nested mapping. This test does not * check the inherited fields. 
*/ @SuppressWarnings("rawtypes") @Test public void shouldMapCorrectly() { Mapper<A, AResource> mapper = Mapping.from(A.class) .to(AResource.class) .omitInSource(A::getMoreInA) .omitInDestination(AResource::getMoreInAResource) .reassign(A::getZahlInA) .to(AResource::getZahlInAResource) .useMapper(Mapping.from(B.class) .to(BResource.class) .mapper()) .mapper(); B b = new B(B_STRING, B_NUMBER, B_INTEGER); A a = new A(MORE_IN_A, STRING, NUMBER, INTEGER, ZAHL_IN_A, b); a.setZahlInA(ZAHL_IN_A); A[] aarr = new A[] { a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a }; List<A> aList = Arrays.asList(aarr); List<AResource> arCollection = mapper.map(aList); // Make sure this is a new collection assertFalse((List) aList == (List) arCollection); for (AResource ar : arCollection) { assertNull(ar.getMoreInAResource()); assertEquals(STRING, a.getString()); assertEquals(STRING, ar.getString()); assertEquals(NUMBER, a.getNumber()); assertEquals(NUMBER, ar.getNumber()); assertEquals(INTEGER, a.getInteger()); assertEquals(INTEGER, ar.getInteger()); assertEquals(ZAHL_IN_A, a.getZahlInA()); assertEquals(ZAHL_IN_A, ar.getZahlInAResource()); BResource br = ar.getB(); assertEquals(B_STRING, b.getString()); assertEquals(B_STRING, br.getString()); assertEquals(B_NUMBER, b.getNumber()); assertEquals(B_NUMBER, br.getNumber()); assertEquals(B_INTEGER, b.getInteger()); assertEquals(B_INTEGER, br.getInteger()); } } /** * Ensures that the {@link Mapper} detects one more property in the source object that is not omitted by the mapping * configuration. The {@link Mapper} is expected to throw a {@link MappingException}. */ @Test(expected = MappingException.class) public void oneMoreSourceFieldInA() { Mapping.from(AWithOneMoreSourceField.class) .to(AResourceWithOneMoreSourceField.class) .mapper(); } /** * Ensures that an unmatched source field is omitted. */ @Test public void oneMoreSourceFieldInAButItIsOmitted() { Mapper<AWithOneMoreSourceField, AResourceWithOneMoreSourceField> mapper = Mapping.from(AWithOneMoreSourceField.class) .to(AResourceWithOneMoreSourceField.class) .omitInSource(a -> a.getOnlyInA()) .mapper(); AWithOneMoreSourceField aWithOneMoreSourceField = new AWithOneMoreSourceField(1, 10, "text"); AResourceWithOneMoreSourceField map = mapper.map(aWithOneMoreSourceField); assertEquals(aWithOneMoreSourceField.getText(), map.getText()); assertEquals(aWithOneMoreSourceField.getZahl(), map.getZahl()); } /** * Ensures that the {@link Mapper} detects one more property in the destination object that is not omitted by the * mapping * configuration. The {@link Mapper} is expected to throw a {@link MappingException}. */ @Test(expected = MappingException.class) public void oneMoreDestinationFieldInAResource() { Mapping.from(AWithOneMoreDestinationField.class) .to(AResourceWithOneMoreDestinationField.class) .mapper(); } /** * Ensures that an unmatched destination field is omitted. 
*/ @Test public void oneMoreDestinationFieldInAResourceButItIsOmitted() { Mapper<AWithOneMoreDestinationField, AResourceWithOneMoreDestinationField> mapper = Mapping.from(AWithOneMoreDestinationField.class) .to(AResourceWithOneMoreDestinationField.class) .omitInDestination(ar -> ar.getOnlyInAResource()) .mapper(); AWithOneMoreDestinationField aWithOneMoreDestinationField = new AWithOneMoreDestinationField(10, "text"); AResourceWithOneMoreDestinationField map = mapper.map(aWithOneMoreDestinationField); assertEquals(aWithOneMoreDestinationField.getText(), map.getText()); assertEquals(aWithOneMoreDestinationField.getZahl(), map.getZahl()); } /** * Ensures that the mapper performs a correct reassignment of fields. */ @Test public void reassign() { Mapper<AReassign, AResourceReassign> mapper = Mapping.from(AReassign.class) .to(AResourceReassign.class) .reassign(AReassign::getFirstNumberInA) .to(AResourceReassign::getFirstNumberInAResource) .reassign(AReassign::getSecondNumberInA) .to(AResourceReassign::getSecondNumberInAResource) .mapper(); AReassign aReassign = new AReassign(1, 2, 3); AResourceReassign map = mapper.map(aReassign); assertEquals(aReassign.getZahl(), map.getZahl()); assertEquals(aReassign.getFirstNumberInA(), map.getFirstNumberInAResource()); assertEquals(aReassign.getSecondNumberInA(), map.getSecondNumberInAResource()); } /** * Ensures that the mapper does not allow an omitted field in the source to be reassigned. */ @Test(expected = MappingException.class) public void reassignAnOmittedFieldInSource() { Mapping.from(AReassign.class) .to(AResourceReassign.class) .omitInSource(AReassign::getFirstNumberInA) .reassign(AReassign::getFirstNumberInA) .to(AResourceReassign::getFirstNumberInAResource) .reassign(AReassign::getSecondNumberInA) .to(AResourceReassign::getSecondNumberInAResource) .mapper(); } /** * Ensures that the mapper does not allow an omitted field in the destination to be reassigned. */ @Test(expected = MappingException.class) public void reassignToAnOmittedFieldInDestination() { Mapping.from(AReassign.class) .to(AResourceReassign.class) .omitInDestination(ar -> ar.getFirstNumberInAResource()) .reassign(AReassign::getFirstNumberInA) .to(AResourceReassign::getFirstNumberInAResource) .reassign(AReassign::getSecondNumberInA) .to(AResourceReassign::getSecondNumberInAResource) .mapper(); } /** * Ensures that the mapper detects an unmapped field in the destination while all source fields are mapped. */ @Test(expected = MappingException.class) public void reassignAndOneDestinationFieldIsUnmapped() { Mapping.from(AReassign.class) .to(AResourceReassign.class) .reassign(AReassign::getFirstNumberInA) .to(AResourceReassign::getSecondNumberInAResource) .omitInSource(AReassign::getSecondNumberInA) .mapper(); } }
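// ---------------------------------------------------------------------------
// Illustrative sketch only (not part of MapperTest above): a minimal,
// self-contained example of the Mapping fluent API that the tests exercise --
// from()/to(), reassign().to(), omitInSource()/omitInDestination(), mapper()
// and map(). The Person/PersonDto beans below are hypothetical; the sketch
// assumes the same bean conventions (default constructors, getters/setters)
// that the A/AResource test fixtures appear to follow.
// ---------------------------------------------------------------------------
package com.remondis.remap;

public class MappingSketch {

  // Hypothetical source bean.
  public static class Person {
    private String name;
    private int yearsOld;     // differently named in the DTO -> reassign
    private String internal;  // not wanted in the DTO -> omitInSource

    public Person() {}

    public Person(String name, int yearsOld, String internal) {
      this.name = name;
      this.yearsOld = yearsOld;
      this.internal = internal;
    }

    public String getName() { return name; }
    public void setName(String name) { this.name = name; }
    public int getYearsOld() { return yearsOld; }
    public void setYearsOld(int yearsOld) { this.yearsOld = yearsOld; }
    public String getInternal() { return internal; }
    public void setInternal(String internal) { this.internal = internal; }
  }

  // Hypothetical destination bean.
  public static class PersonDto {
    private String name;        // same name/type -> mapped implicitly
    private int age;            // receives Person.yearsOld via reassign
    private String displayName; // filled elsewhere -> omitInDestination

    public String getName() { return name; }
    public void setName(String name) { this.name = name; }
    public int getAge() { return age; }
    public void setAge(int age) { this.age = age; }
    public String getDisplayName() { return displayName; }
    public void setDisplayName(String displayName) { this.displayName = displayName; }
  }

  public static void main(String[] args) {
    // Every source/destination property must be matched, reassigned or omitted;
    // otherwise mapper() throws a MappingException, as the tests above verify.
    Mapper<Person, PersonDto> mapper = Mapping.from(Person.class)
        .to(PersonDto.class)
        .reassign(Person::getYearsOld)
        .to(PersonDto::getAge)
        .omitInSource(Person::getInternal)
        .omitInDestination(PersonDto::getDisplayName)
        .mapper();

    PersonDto dto = mapper.map(new Person("Jane", 42, "row-17"));
    System.out.println(dto.getName() + " is " + dto.getAge());
  }
}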
package control; import data.ScriptingProject; import gui.centerarea.CameraShotBlock; import gui.root.RootPane; import javafx.application.Platform; import javafx.stage.Stage; import org.junit.Before; import org.junit.Test; import org.mockito.Matchers; import org.mockito.Mockito; import org.powermock.core.classloader.annotations.PowerMockIgnore; import org.powermock.core.classloader.annotations.PrepareForTest; import org.powermock.reflect.internal.WhiteboxImpl; import org.testfx.framework.junit.ApplicationTest; import java.util.concurrent.CountDownLatch; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; /** * @author alex */ @PrepareForTest(ControllerManager.class) public class ControllerManagerTest extends ApplicationTest { ControllerManager controllerManager; RootPane rootPaneMock; DetailViewController detailViewControllerMock; ToolViewController toolViewControllerMock; @Before public void initialize() { // GUI element necessary for instantiation rootPaneMock = Mockito.mock(RootPane.class); TimelineController timelineController = Mockito.mock(TimelineController.class); detailViewControllerMock = Mockito.mock(DetailViewController.class); toolViewControllerMock = Mockito.mock(ToolViewController.class); DirectorTimelineController directorTimelineController = Mockito.mock(DirectorTimelineController.class); ProjectController projectController = Mockito.mock(ProjectController.class); controllerManager = new ControllerManager(rootPaneMock, timelineController, detailViewControllerMock, toolViewControllerMock, directorTimelineController, projectController); } @Test public void getTimelineControllerTest() { assertNotNull(controllerManager.getTimelineControl()); } @Test public void getDirectorTimelineControllerTest() { assertNotNull(controllerManager.getDirectorTimelineControl()); } @Test public void getToolViewControllerTest() { assertNotNull(controllerManager.getToolViewController()); } @Test public void getDetailViewControllerTest() { assertNotNull(controllerManager.getDetailViewController()); } @Test public void getProjectControllerTest() { assertNotNull(controllerManager.getProjectController()); } @Test public void getActiveShotBlockTest() { assertNull(controllerManager.getActiveShotBlock()); } @Override public void start(Stage stage) throws Exception { } // TODO: Fix the test conflicts w/ javafx issues // @Test // public void updateWindowTitleTest() throws InterruptedException { // // Call method under test // final CountDownLatch[] latch = {new CountDownLatch(1)}; // Platform.runLater(() -> { // Stage primaryStage = Mockito.mock(Stage.class); // when(rootPaneMock.getPrimaryStage()).thenReturn(primaryStage); // ScriptingProject scriptingProjectMock = Mockito.mock(ScriptingProject.class); // when(scriptingProjectMock.getName()).thenReturn("I'M A TITLE!"); // controllerManager.setScriptingProject(scriptingProjectMock); // controllerManager.updateWindowTitle(); // latch[0].countDown(); // Mockito.verify(primaryStage, times(1)).setTitle("I'M A TITLE"); // latch[0].await(); @Test public void setActiveShotBlockTest() { CameraShotBlock shotBlockMock = mock(CameraShotBlock.class); controllerManager.setActiveShotBlock(shotBlockMock); assertEquals(shotBlockMock, controllerManager.getActiveShotBlock()); verify(detailViewControllerMock).activeBlockChanged(); 
verify(toolViewControllerMock).activeBlockChanged(); } // TODO: Fix the test conflicts w/ javafx issues // @Test // public void initOnCloseOperationTest() throws InterruptedException { // Stage primaryStage = Mockito.mock(Stage.class); // when(rootPaneMock.getPrimaryStage()).thenReturn(primaryStage); // // Call method under test // final CountDownLatch[] latch = {new CountDownLatch(1)}; // Platform.runLater(() -> { // try { // WhiteboxImpl.invokeMethod(controllerManager, "initOnCloseOperation"); // } catch (Exception e) { // e.printStackTrace(); // latch[0].countDown(); // latch[0].await(); // Mockito.verify(primaryStage, times(1)).setOnCloseRequest(Matchers.anyObject()); }
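// ---------------------------------------------------------------------------
// Illustrative sketch only (not part of ControllerManagerTest above): the
// disabled tests in that class try to drive JavaFX-thread code with
// Platform.runLater plus a CountDownLatch. The helper below shows that
// pattern in a minimal form: run the action on the FX Application Thread,
// wait for it to finish, then do the Mockito verification afterwards on the
// test thread. The class and method names, and the 5-second timeout, are
// assumptions, not existing project API.
// ---------------------------------------------------------------------------
package control;

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import javafx.application.Platform;

final class FxTestSketch {

    private FxTestSketch() {}

    /** Runs the action on the JavaFX Application Thread and blocks until it completes. */
    static void runOnFxThreadAndWait(Runnable action) throws InterruptedException {
        CountDownLatch latch = new CountDownLatch(1);
        Platform.runLater(() -> {
            try {
                action.run();
            } finally {
                latch.countDown(); // always release the waiting test thread
            }
        });
        if (!latch.await(5, TimeUnit.SECONDS)) {
            throw new AssertionError("JavaFX action did not finish in time");
        }
    }

    /*
     * Usage, roughly mirroring the commented-out updateWindowTitleTest
     * (assuming updateWindowTitle() sets the stage title to the project name):
     *
     *   runOnFxThreadAndWait(() -> {
     *       controllerManager.setScriptingProject(scriptingProjectMock);
     *       controllerManager.updateWindowTitle();
     *   });
     *   // verify after the latch is released, on the test thread
     *   Mockito.verify(primaryStage).setTitle("I'M A TITLE!");
     */
}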
package guitests; import static org.junit.Assert.assertEquals; import static org.junit.Assert.fail; import java.io.File; import java.util.List; import java.util.Map; import javafx.scene.control.ComboBox; import javafx.scene.control.TextField; import javafx.scene.input.KeyCode; import org.eclipse.egit.github.core.RepositoryId; import org.junit.Test; import org.loadui.testfx.utils.FXTestUtils; import prefs.Preferences; public class UseGlobalConfigsTest extends UITest { @Override public void launchApp() { // isTestMode in UI checks for testconfig too so we don't need to specify --test=true here. FXTestUtils.launchApp(TestUI.class, "--testconfig=true"); } @Test public void globalConfigTest() { // Override setupMethod() if you want to do stuff to the JSON beforehand TextField repoOwnerField = find("#repoOwnerField"); doubleClick(repoOwnerField); doubleClick(repoOwnerField); type("dummy").push(KeyCode.TAB); type("dummy").push(KeyCode.TAB); type("test").push(KeyCode.TAB); type("test"); click("Sign in"); sleep(8000); ComboBox<String> repositorySelector = find("#repositorySelector"); assertEquals(repositorySelector.getValue(), "dummy/dummy"); // Make a new board click("Boards"); click("Save"); // Somehow the text field cannot be populated by typing on the CI, use setText instead. // TODO find out why ((TextField) find("#boardnameinput")).setText("Empty Board"); click("OK"); // Load dummy2/dummy2 too press(KeyCode.CONTROL).press(KeyCode.P).release(KeyCode.P).release(KeyCode.CONTROL); sleep(2000); click("#dummy/dummy_col1_filterTextField"); type("repo"); press(KeyCode.SHIFT).press(KeyCode.SEMICOLON).release(KeyCode.SEMICOLON).release(KeyCode.SHIFT); type("dummy2/dummy2"); push(KeyCode.ENTER); sleep(8000); // Make a new board click("Boards"); click("Save"); ((TextField) find("#boardnameinput")).setText("Dummy Board"); click("OK"); // Then exit program... click("File"); click("Quit"); // ...and check if the test JSON is still there... File testConfig = new File(Preferences.DIRECTORY, Preferences.TEST_CONFIG_FILE); if (!(testConfig.exists() && testConfig.isFile())) fail(); // ...then check that the JSON file contents are correct. Preferences testPref = new Preferences(true); // Credentials assertEquals("test", testPref.getLastLoginUsername()); assertEquals("test", testPref.getLastLoginPassword()); // Last open filters List<String> lastOpenFilters = testPref.getLastOpenFilters(); assertEquals(2, lastOpenFilters.size()); assertEquals("", lastOpenFilters.get(0)); assertEquals("repo:dummy2/dummy2", lastOpenFilters.get(1)); // Last viewed repository RepositoryId lastViewedRepository = testPref.getLastViewedRepository().get(); assertEquals("dummy/dummy", lastViewedRepository.generateId()); // Boards Map<String, List<String>> boards = testPref.getAllBoards(); List<String> emptyBoard = boards.get("Empty Board"); assertEquals(1, emptyBoard.size()); assertEquals("", emptyBoard.get(0)); List<String> dummyBoard = boards.get("Dummy Board"); assertEquals(2, dummyBoard.size()); assertEquals("", dummyBoard.get(0)); assertEquals("repo:dummy2/dummy2", dummyBoard.get(1)); } }
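// ---------------------------------------------------------------------------
// Illustrative sketch only (not part of UseGlobalConfigsTest above): that test
// repeats two TestFX idioms -- chording a modifier key (CTRL+P to open the
// repo picker, SHIFT+SEMICOLON to type ':') and setting a text field directly
// because typing into it is unreliable on CI. A base-class helper like the
// one below could name those idioms. UITestHelpers and its method names are
// assumptions; press/release/find are the same inherited methods the test
// already calls, assumed to be exposed by UITest.
// ---------------------------------------------------------------------------
package guitests;

import javafx.scene.control.TextField;
import javafx.scene.input.KeyCode;

public abstract class UITestHelpers extends UITest {

    /** Presses modifier+key, then releases both, e.g. CTRL+P. */
    protected void pushWithModifier(KeyCode modifier, KeyCode key) {
        press(modifier).press(key).release(key).release(modifier);
    }

    /** Types ':' by chording SHIFT with SEMICOLON, as the filter syntax requires. */
    protected void typeColon() {
        pushWithModifier(KeyCode.SHIFT, KeyCode.SEMICOLON);
    }

    /** Sets a TextField's content directly instead of typing it (CI workaround). */
    protected void setTextById(String fxId, String text) {
        ((TextField) find(fxId)).setText(text);
    }
}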
package hudson.plugins.git; import com.cloudbees.plugins.credentials.Credentials; import com.cloudbees.plugins.credentials.CredentialsProvider; import com.cloudbees.plugins.credentials.CredentialsScope; import com.cloudbees.plugins.credentials.CredentialsStore; import com.cloudbees.plugins.credentials.SystemCredentialsProvider; import com.cloudbees.plugins.credentials.common.StandardCredentials; import com.cloudbees.plugins.credentials.domains.Domain; import com.cloudbees.plugins.credentials.impl.UsernamePasswordCredentialsImpl; import com.gargoylesoftware.htmlunit.html.HtmlPage; import hudson.EnvVars; import hudson.FilePath; import hudson.Functions; import hudson.Launcher; import hudson.matrix.Axis; import hudson.matrix.AxisList; import hudson.matrix.MatrixBuild; import hudson.matrix.MatrixProject; import hudson.model.*; import hudson.plugins.git.GitSCM.BuildChooserContextImpl; import hudson.plugins.git.GitSCM.DescriptorImpl; import hudson.plugins.git.browser.GitRepositoryBrowser; import hudson.plugins.git.browser.GithubWeb; import hudson.plugins.git.extensions.GitSCMExtension; import hudson.plugins.git.extensions.impl.*; import hudson.plugins.git.util.BuildChooser; import hudson.plugins.git.util.BuildChooserContext; import hudson.plugins.git.util.BuildChooserContext.ContextCallable; import hudson.plugins.git.util.BuildData; import hudson.plugins.git.util.GitUtils; import hudson.plugins.parameterizedtrigger.BuildTrigger; import hudson.plugins.parameterizedtrigger.ResultCondition; import hudson.remoting.Channel; import hudson.remoting.VirtualChannel; import hudson.scm.ChangeLogSet; import hudson.scm.PollingResult; import hudson.scm.PollingResult.Change; import hudson.scm.SCMRevisionState; import hudson.security.ACL; import hudson.security.ACLContext; import hudson.slaves.DumbSlave; import hudson.slaves.EnvironmentVariablesNodeProperty.Entry; import hudson.tools.ToolLocationNodeProperty; import hudson.tools.ToolProperty; import hudson.triggers.SCMTrigger; import hudson.util.LogTaskListener; import hudson.util.RingBufferLogHandler; import hudson.util.StreamTaskListener; import jenkins.security.MasterToSlaveCallable; import org.apache.commons.io.FileUtils; import org.apache.commons.io.IOUtils; import org.eclipse.jgit.lib.Constants; import org.eclipse.jgit.lib.ObjectId; import org.eclipse.jgit.lib.PersonIdent; import org.eclipse.jgit.lib.Ref; import org.eclipse.jgit.lib.Repository; import org.jenkinsci.plugins.tokenmacro.TokenMacro; import org.jenkinsci.plugins.gitclient.*; import org.jenkinsci.plugins.workflow.cps.CpsFlowDefinition; import org.jenkinsci.plugins.workflow.job.WorkflowJob; import org.jenkinsci.plugins.workflow.job.WorkflowRun; import org.junit.Assume; import org.junit.Ignore; import org.junit.Rule; import org.junit.Test; import org.jvnet.hudson.test.LoggerRule; import org.jvnet.hudson.test.MockAuthorizationStrategy; import org.jvnet.hudson.test.TestExtension; import static org.jvnet.hudson.test.LoggerRule.recorded; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.ObjectStreamException; import java.io.Serializable; import java.net.URL; import java.text.MessageFormat; import java.util.*; import java.util.logging.Level; import java.util.logging.Logger; import java.util.regex.Pattern; import java.util.stream.Collectors; import org.eclipse.jgit.transport.RemoteConfig; import static org.hamcrest.MatcherAssert.*; import static org.hamcrest.Matchers.*; import org.jvnet.hudson.test.Issue; import org.jvnet.hudson.test.JenkinsRule; import static 
org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNotSame; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertSame; import static org.junit.Assert.assertTrue; import org.junit.After; import org.junit.Before; import org.junit.BeforeClass; import org.mockito.Mockito; import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import jenkins.model.Jenkins; import jenkins.plugins.git.CliGitCommand; import jenkins.plugins.git.GitSampleRepoRule; /** * Tests for {@link GitSCM}. * @author ishaaq */ public class GitSCMTest extends AbstractGitTestCase { @Rule public GitSampleRepoRule secondRepo = new GitSampleRepoRule(); @Rule public LoggerRule logRule = new LoggerRule(); private CredentialsStore store = null; @BeforeClass public static void setGitDefaults() throws Exception { CliGitCommand gitCmd = new CliGitCommand(null); gitCmd.setDefaults(); } @Before public void enableSystemCredentialsProvider() throws Exception { SystemCredentialsProvider.getInstance().setDomainCredentialsMap( Collections.singletonMap(Domain.global(), Collections.<Credentials>emptyList())); for (CredentialsStore s : CredentialsProvider.lookupStores(Jenkins.get())) { if (s.getProvider() instanceof SystemCredentialsProvider.ProviderImpl) { store = s; break; } } assertThat("The system credentials provider is enabled", store, notNullValue()); } @After public void waitForJenkinsIdle() throws Exception { if (cleanupIsUnreliable()) { rule.waitUntilNoActivityUpTo(5001); } } private StandardCredentials getInvalidCredential() { String username = "bad-user"; String password = "bad-password"; CredentialsScope scope = CredentialsScope.GLOBAL; String id = "username-" + username + "-password-" + password; return new UsernamePasswordCredentialsImpl(scope, id, "desc: " + id, username, password); } @Test public void testAddGitTagAction() throws Exception { FreeStyleProject project = setupSimpleProject("master"); List<UserRemoteConfig> remoteConfigs = GitSCM.createRepoList("https://github.com/jenkinsci/git-plugin", "github"); project.setScm(new GitSCM(remoteConfigs, Collections.singletonList(new BranchSpec("master")), false, null, null, null, null)); GitSCM scm = (GitSCM) project.getScm(); final DescriptorImpl descriptor = (DescriptorImpl) scm.getDescriptor(); boolean originalValue = scm.isAddGitTagAction(); assertFalse("Wrong initial value for hide tag action", originalValue); descriptor.setAddGitTagAction(true); assertTrue("Hide tag action not set", scm.isAddGitTagAction()); descriptor.setAddGitTagAction(false); assertFalse("Wrong final value for hide tag action", scm.isAddGitTagAction()); descriptor.setAddGitTagAction(originalValue); // restore original value of addGitTagAction /* Exit test early if running on Windows and path will be too long */ /* Known limitation of git for Windows 2.28.0 and earlier */ /* Needs a longpath fix in git for Windows */ String currentDirectoryPath = new File(".").getCanonicalPath(); if (isWindows() && currentDirectoryPath.length() > 95) { return; } logRule.record(GitSCM.class, Level.FINE).capture(20); // Build 1 will not add a tag action commit("commitFileWithoutGitTagAction", johnDoe, "Commit 1 without git tag action"); build(project, Result.SUCCESS); assertThat(logRule, recorded(containsString("Not adding GitTagAction to build 1"))); // Build 2 will add a tag action 
descriptor.setAddGitTagAction(true); build(project, Result.SUCCESS); assertThat(logRule, recorded(containsString("Adding GitTagAction to build 2"))); // Build 3 will not add a tag action descriptor.setAddGitTagAction(false); build(project, Result.SUCCESS); assertThat(logRule, recorded(containsString("Not adding GitTagAction to build 3"))); } @Test public void manageShouldAccessGlobalConfig() throws Exception { final String USER = "user"; final String MANAGER = "manager"; rule.jenkins.setSecurityRealm(rule.createDummySecurityRealm()); rule.jenkins.setAuthorizationStrategy(new MockAuthorizationStrategy() // Read access .grant(Jenkins.READ).everywhere().to(USER) // Read and Manage .grant(Jenkins.READ).everywhere().to(MANAGER) .grant(Jenkins.MANAGE).everywhere().to(MANAGER) ); try (ACLContext c = ACL.as(User.getById(USER, true))) { Collection<Descriptor> descriptors = Functions.getSortedDescriptorsForGlobalConfigUnclassified(); assertThat("Global configuration should not be accessible to READ users", descriptors, is(empty())); } try (ACLContext c = ACL.as(User.getById(MANAGER, true))) { Collection<Descriptor> descriptors = Functions.getSortedDescriptorsForGlobalConfigUnclassified(); Optional<Descriptor> found = descriptors.stream().filter(descriptor -> descriptor instanceof GitSCM.DescriptorImpl).findFirst(); assertTrue("Global configuration should be accessible to MANAGE users", found.isPresent()); } } @Test public void trackCredentials() throws Exception { StandardCredentials credential = getInvalidCredential(); store.addCredentials(Domain.global(), credential); Fingerprint fingerprint = CredentialsProvider.getFingerprintOf(credential); assertThat("Fingerprint should not be set before job definition", fingerprint, nullValue()); JenkinsRule.WebClient wc = rule.createWebClient(); HtmlPage page = wc.goTo("credentials/store/system/domain/_/credentials/" + credential.getId()); assertThat("Have usage tracking reported", page.getElementById("usage"), notNullValue()); assertThat("No fingerprint created until first use", page.getElementById("usage-missing"), notNullValue()); assertThat("No fingerprint created until first use", page.getElementById("usage-present"), nullValue()); FreeStyleProject project = setupProject("master", credential); fingerprint = CredentialsProvider.getFingerprintOf(credential); assertThat("Fingerprint should not be set before first build", fingerprint, nullValue()); final String commitFile1 = "commitFile1"; commit(commitFile1, johnDoe, "Commit number 1"); build(project, Result.SUCCESS, commitFile1); fingerprint = CredentialsProvider.getFingerprintOf(credential); assertThat("Fingerprint should be set after first build", fingerprint, notNullValue()); assertThat(fingerprint.getJobs(), hasItem(is(project.getFullName()))); Fingerprint.RangeSet rangeSet = fingerprint.getRangeSet(project); assertThat(rangeSet, notNullValue()); assertThat(rangeSet.includes(project.getLastBuild().getNumber()), is(true)); page = wc.goTo("credentials/store/system/domain/_/credentials/" + credential.getId()); assertThat(page.getElementById("usage-missing"), nullValue()); assertThat(page.getElementById("usage-present"), notNullValue()); assertThat(page.getAnchorByText(project.getFullDisplayName()), notNullValue()); } /** * Basic test - create a GitSCM based project, check it out and build for the first time. * Next test that polling works correctly, make another commit, check that polling finds it, * then build it and finally test the build culprits as well as the contents of the workspace. 
* @throws Exception on error */ @Test public void testBasic() throws Exception { FreeStyleProject project = setupSimpleProject("master"); // create initial commit and then run the build against it: final String commitFile1 = "commitFile1"; commit(commitFile1, johnDoe, "Commit number 1"); build(project, Result.SUCCESS, commitFile1); assertFalse("scm polling should not detect any more changes after build", project.poll(listener).hasChanges()); final String commitFile2 = "commitFile2"; commit(commitFile2, janeDoe, "Commit number 2"); assertTrue("scm polling did not detect commit2 change", project.poll(listener).hasChanges()); //... and build it... final FreeStyleBuild build2 = build(project, Result.SUCCESS, commitFile2); final Set<User> culprits = build2.getCulprits(); assertEquals("The build should have only one culprit", 1, culprits.size()); assertEquals("", janeDoe.getName(), culprits.iterator().next().getFullName()); assertTrue(build2.getWorkspace().child(commitFile2).exists()); rule.assertBuildStatusSuccess(build2); assertFalse("scm polling should not detect any more changes after build", project.poll(listener).hasChanges()); } @Test @Issue("JENKINS-56176") public void testBasicRemotePoll() throws Exception { // FreeStyleProject project = setupProject("master", true, false); FreeStyleProject project = setupProject("master", false, null, null, null, true, null); // create initial commit and then run the build against it: final String commitFile1 = "commitFile1"; commit(commitFile1, johnDoe, "Commit number 1"); build(project, Result.SUCCESS, commitFile1); assertFalse("scm polling should not detect any more changes after build", project.poll(listener).hasChanges()); final String commitFile2 = "commitFile2"; String sha1String = commit(commitFile2, janeDoe, "Commit number 2"); assertTrue("scm polling did not detect commit2 change", project.poll(listener).hasChanges()); // ... and build it... final FreeStyleBuild build2 = build(project, Result.SUCCESS, commitFile2); final Set<User> culprits = build2.getCulprits(); assertEquals("The build should have only one culprit", 1, culprits.size()); assertEquals("", janeDoe.getName(), culprits.iterator().next().getFullName()); assertTrue(build2.getWorkspace().child(commitFile2).exists()); rule.assertBuildStatusSuccess(build2); assertFalse("scm polling should not detect any more changes after build", project.poll(listener).hasChanges()); // JENKINS-56176 token macro expansion broke when BuildData was no longer updated assertThat(TokenMacro.expandAll(build2, listener, "${GIT_REVISION,length=7}"), is(sha1String.substring(0, 7))); assertThat(TokenMacro.expandAll(build2, listener, "${GIT_REVISION}"), is(sha1String)); assertThat(TokenMacro.expandAll(build2, listener, "$GIT_REVISION"), is(sha1String)); } @Test public void testBranchSpecWithRemotesMaster() throws Exception { FreeStyleProject projectMasterBranch = setupProject("remotes/origin/master", false, null, null, null, true, null); // create initial commit and build final String commitFile1 = "commitFile1"; commit(commitFile1, johnDoe, "Commit number 1"); build(projectMasterBranch, Result.SUCCESS, commitFile1); } /** * This test and testSpecificRefspecsWithoutCloneOption confirm behaviors of * refspecs on initial clone. Without the CloneOption to honor refspec, all * references are cloned, even if they will be later ignored due to the * refspec. With the CloneOption to ignore refspec, the initial clone also * honors the refspec and only retrieves references per the refspec. 
* @throws Exception on error */ @Test @Issue("JENKINS-31393") public void testSpecificRefspecs() throws Exception { List<UserRemoteConfig> repos = new ArrayList<>(); repos.add(new UserRemoteConfig(testRepo.gitDir.getAbsolutePath(), "origin", "+refs/heads/foo:refs/remotes/foo", null)); /* Set CloneOption to honor refspec on initial clone */ FreeStyleProject projectWithMaster = setupProject(repos, Collections.singletonList(new BranchSpec("master")), null, false, null); CloneOption cloneOptionMaster = new CloneOption(false, null, null); cloneOptionMaster.setHonorRefspec(true); ((GitSCM)projectWithMaster.getScm()).getExtensions().add(cloneOptionMaster); /* Set CloneOption to honor refspec on initial clone */ FreeStyleProject projectWithFoo = setupProject(repos, Collections.singletonList(new BranchSpec("foo")), null, false, null); CloneOption cloneOptionFoo = new CloneOption(false, null, null); cloneOptionFoo.setHonorRefspec(true); ((GitSCM)projectWithMaster.getScm()).getExtensions().add(cloneOptionFoo); // create initial commit final String commitFile1 = "commitFile1"; commit(commitFile1, johnDoe, "Commit in master"); // create branch and make initial commit git.checkout().ref("master").branch("foo").execute(); commit(commitFile1, johnDoe, "Commit in foo"); build(projectWithMaster, Result.FAILURE); build(projectWithFoo, Result.SUCCESS, commitFile1); } /** * This test confirms the behaviour of avoiding the second fetch in GitSCM checkout() **/ @Test @Issue("JENKINS-56404") public void testAvoidRedundantFetch() throws Exception { List<UserRemoteConfig> repos = new ArrayList<>(); /* Without honor refspec on initial clone */ /* Randomly enable shallow clone, should not alter test assertions */ /** * After avoiding the second fetch call in retrieveChanges(), this test verifies there is no data loss by fetching a repository * (git init + git fetch) with a narrow refspec but without CloneOption of honorRefspec = true on initial clone * First fetch -> wide refspec * Second fetch -> narrow refspec (avoided) **/ @Test @Issue("JENKINS-56404") public void testAvoidRedundantFetchWithoutHonorRefSpec() throws Exception { List<UserRemoteConfig> repos = new ArrayList<>(); repos.add(new UserRemoteConfig(testRepo.gitDir.getAbsolutePath(), "origin", "+refs/heads/foo:refs/remotes/foo", null)); /* Without honor refspec on initial clone */ FreeStyleProject projectWithMaster = setupProject(repos, Collections.singletonList(new BranchSpec("master")), null, false, null); if (random.nextBoolean()) { /* Randomly enable shallow clone, should not alter test assertions */ CloneOption cloneOptionMaster = new CloneOption(false, null, null); cloneOptionMaster.setDepth(1); ((GitSCM) projectWithMaster.getScm()).getExtensions().add(cloneOptionMaster); } // create initial commit final String commitFile1 = "commitFile1"; commit(commitFile1, johnDoe, "Commit in master"); // Add another branch 'foo' git.checkout().ref("master").branch("foo").execute(); commit(commitFile1, johnDoe, "Commit in foo"); // Build will be success because the initial clone disregards refspec and fetches all branches FreeStyleBuild build = build(projectWithMaster, Result.SUCCESS); FilePath childFile = returnFile(build); if (childFile != null) { // assert that no data is lost by avoidance of second fetch assertThat("master branch was not fetched", childFile.readToString(), containsString("master")); assertThat("foo branch was not fetched", childFile.readToString(), containsString("foo")); } /** * After avoiding the second fetch call in retrieveChanges(), this 
test verifies there is no data loss by fetching a * repository(git init + git fetch) with a narrow refspec with CloneOption of honorRefspec = true on initial clone * First fetch -> narrow refspec (since refspec is honored on initial clone) * Second fetch -> narrow refspec (avoided) **/ @Test @Issue("JENKINS-56404") public void testAvoidRedundantFetchWithHonorRefSpec() throws Exception { List<UserRemoteConfig> repos = new ArrayList<>(); String refSpec = "+refs/heads/foo:refs/remotes/foo"; repos.add(new UserRemoteConfig(testRepo.gitDir.getAbsolutePath(), "origin", refSpec, null)); /* With honor refspec on initial clone */ FreeStyleProject projectWithMaster = setupProject(repos, Collections.singletonList(new BranchSpec("master")), null, false, null); CloneOption cloneOptionMaster = new CloneOption(false, null, null); cloneOptionMaster.setHonorRefspec(true); ((GitSCM)projectWithMaster.getScm()).getExtensions().add(cloneOptionMaster); // create initial commit final String commitFile1 = "commitFile1"; final String commitFile1SHA1a = commit(commitFile1, johnDoe, "Commit in master"); // Add another branch 'foo' git.checkout().ref("master").branch("foo").execute(); final String commitFile1SHA1b = commit(commitFile1, johnDoe, "Commit in foo"); // Build will be failure because the initial clone regards refspec and fetches branch 'foo' only. FreeStyleBuild build = build(projectWithMaster, Result.FAILURE); FilePath childFile = returnFile(build); assertNotNull(childFile); // assert that no data is lost by avoidance of second fetch final String fetchHeadContents = childFile.readToString(); final List<String> buildLog = build.getLog(50); assertThat("master branch was fetched: " + buildLog, fetchHeadContents, not(containsString("branch 'master'"))); assertThat("foo branch was not fetched: " + buildLog, fetchHeadContents, containsString("branch 'foo'")); assertThat("master branch SHA1 '" + commitFile1SHA1a + "' fetched " + buildLog, fetchHeadContents, not(containsString(commitFile1SHA1a))); assertThat("foo branch SHA1 '" + commitFile1SHA1b + "' was not fetched " + buildLog, fetchHeadContents, containsString(commitFile1SHA1b)); assertRedundantFetchIsSkipped(build, refSpec); assertThat(build.getResult(), is(Result.FAILURE)); } @Test @Issue("JENKINS-49757") public void testAvoidRedundantFetchWithNullRefspec() throws Exception { String nullRefspec = null; List<UserRemoteConfig> repos = new ArrayList<>(); repos.add(new UserRemoteConfig(testRepo.gitDir.getAbsolutePath(), "origin", nullRefspec, null)); /* Without honor refspec on initial clone */ FreeStyleProject projectWithMaster = setupProject(repos, Collections.singletonList(new BranchSpec("master")), null, false, null); if (random.nextBoolean()) { /* Randomly enable shallow clone, should not alter test assertions */ CloneOption cloneOptionMaster = new CloneOption(false, null, null); cloneOptionMaster.setDepth(1); ((GitSCM) projectWithMaster.getScm()).getExtensions().add(cloneOptionMaster); } // create initial commit final String commitFile1 = "commitFile1"; commit(commitFile1, johnDoe, "Commit in master"); FreeStyleBuild build = build(projectWithMaster, Result.SUCCESS); /* * When initial clone does not honor the refspec and a custom refspec is used * that is not part of the default refspec, then the second fetch is not * redundant and must not be fetched. * * This example uses the format to reference GitHub pull request 553. 
Other * formats would apply as well, but the case is illustrated well enough by * using the GitHub pull request as an example of this type of problem. */ @Test @Issue("JENKINS-49757") public void testRetainRedundantFetch() throws Exception { /* Without honor refspec on initial clone */ /* Randomly enable shallow clone, should not alter test assertions */ /* Create a ref for the fake pull in the source repository */ /* * When "Preserve second fetch during checkout" is checked in during configuring Jenkins, * the second fetch should be retained */ @Test @Issue("JENKINS-49757") public void testRetainRedundantFetchIfSecondFetchIsAllowed() throws Exception { /* Without honor refspec on initial clone */ /* Randomly enable shallow clone, should not alter test assertions */ /** * This test and testSpecificRefspecs confirm behaviors of * refspecs on initial clone. Without the CloneOption to honor refspec, all * references are cloned, even if they will be later ignored due to the * refspec. With the CloneOption to ignore refspec, the initial clone also * honors the refspec and only retrieves references per the refspec. * @throws Exception on error */ @Test @Issue("JENKINS-36507") public void testSpecificRefspecsWithoutCloneOption() throws Exception { List<UserRemoteConfig> repos = new ArrayList<>(); repos.add(new UserRemoteConfig(testRepo.gitDir.getAbsolutePath(), "origin", "+refs/heads/foo:refs/remotes/foo", null)); FreeStyleProject projectWithMaster = setupProject(repos, Collections.singletonList(new BranchSpec("master")), null, false, null); FreeStyleProject projectWithFoo = setupProject(repos, Collections.singletonList(new BranchSpec("foo")), null, false, null); // create initial commit final String commitFile1 = "commitFile1"; commit(commitFile1, johnDoe, "Commit in master"); // create branch and make initial commit git.checkout().ref("master").branch("foo").execute(); commit(commitFile1, johnDoe, "Commit in foo"); build(projectWithMaster, Result.SUCCESS); /* If clone refspec had been honored, this would fail */ build(projectWithFoo, Result.SUCCESS, commitFile1); } /** * An empty remote repo URL failed the job as expected but provided * a poor diagnostic message. The fix for JENKINS-38608 improves * the error message to be clear and helpful. This test checks for * that error message. * @throws Exception on error */ @Test @Issue("JENKINS-38608") public void testAddFirstRepositoryWithNullRepoURL() throws Exception{ List<UserRemoteConfig> repos = new ArrayList<>(); repos.add(new UserRemoteConfig(null, null, null, null)); FreeStyleProject project = setupProject(repos, Collections.singletonList(new BranchSpec("master")), null, false, null); FreeStyleBuild build = build(project, Result.FAILURE); // Before JENKINS-38608 fix assertThat("Build log reports 'Null value not allowed'", build.getLog(175), not(hasItem("Null value not allowed as an environment variable: GIT_URL"))); // After JENKINS-38608 fix assertThat("Build log did not report empty string in job definition", build.getLog(175), hasItem("FATAL: Git repository URL 1 is an empty string in job definition. Checkout requires a valid repository URL")); } /** * An empty remote repo URL failed the job as expected but provided * a poor diagnostic message. The fix for JENKINS-38608 improves * the error message to be clear and helpful. This test checks for * that error message when the second URL is empty. 
* @throws Exception on error */ @Test @Issue("JENKINS-38608") public void testAddSecondRepositoryWithNullRepoURL() throws Exception{ String repoURL = "https://example.com/non-empty/repo/url"; List<UserRemoteConfig> repos = new ArrayList<>(); repos.add(new UserRemoteConfig(repoURL, null, null, null)); repos.add(new UserRemoteConfig(null, null, null, null)); FreeStyleProject project = setupProject(repos, Collections.singletonList(new BranchSpec("master")), null, false, null); FreeStyleBuild build = build(project, Result.FAILURE); // Before JENKINS-38608 fix assertThat("Build log reports 'Null value not allowed'", build.getLog(175), not(hasItem("Null value not allowed as an environment variable: GIT_URL_2"))); // After JENKINS-38608 fix assertThat("Build log did not report empty string in job definition for URL 2", build.getLog(175), hasItem("FATAL: Git repository URL 2 is an empty string in job definition. Checkout requires a valid repository URL")); } @Test public void testBranchSpecWithRemotesHierarchical() throws Exception { FreeStyleProject projectMasterBranch = setupProject("master", false, null, null, null, true, null); FreeStyleProject projectHierarchicalBranch = setupProject("remotes/origin/rel-1/xy", false, null, null, null, true, null); // create initial commit final String commitFile1 = "commitFile1"; commit(commitFile1, johnDoe, "Commit number 1"); // create hierarchical branch, delete master branch, and build git.branch("rel-1/xy"); git.checkout("rel-1/xy"); git.deleteBranch("master"); build(projectMasterBranch, Result.FAILURE); build(projectHierarchicalBranch, Result.SUCCESS, commitFile1); } @Test public void testBranchSpecUsingTagWithSlash() throws Exception { FreeStyleProject projectMasterBranch = setupProject("path/tag", false, null, null, null, true, null); // create initial commit and build final String commitFile1 = "commitFile1"; commit(commitFile1, johnDoe, "Commit number 1 will be tagged with path/tag"); testRepo.git.tag("path/tag", "tag with a slash in the tag name"); build(projectMasterBranch, Result.SUCCESS, commitFile1); } @Test public void testBasicIncludedRegion() throws Exception { FreeStyleProject project = setupProject("master", false, null, null, null, ".*3"); // create initial commit and then run the build against it: final String commitFile1 = "commitFile1"; commit(commitFile1, johnDoe, "Commit number 1"); build(project, Result.SUCCESS, commitFile1); assertFalse("scm polling should not detect any more changes after build", project.poll(listener).hasChanges()); final String commitFile2 = "commitFile2"; commit(commitFile2, janeDoe, "Commit number 2"); assertFalse("scm polling detected commit2 change, which should not have been included", project.poll(listener).hasChanges()); final String commitFile3 = "commitFile3"; commit(commitFile3, johnDoe, "Commit number 3"); assertTrue("scm polling did not detect commit3 change", project.poll(listener).hasChanges()); //... and build it... 
final FreeStyleBuild build2 = build(project, Result.SUCCESS, commitFile2, commitFile3); final Set<User> culprits = build2.getCulprits(); assertEquals("The build should have two culprit", 2, culprits.size()); PersonIdent[] expected = {johnDoe, janeDoe}; assertCulprits("jane doe and john doe should be the culprits", culprits, expected); assertTrue(build2.getWorkspace().child(commitFile2).exists()); assertTrue(build2.getWorkspace().child(commitFile3).exists()); rule.assertBuildStatusSuccess(build2); assertFalse("scm polling should not detect any more changes after build", project.poll(listener).hasChanges()); } /** * testMergeCommitInExcludedRegionIsIgnored() confirms behavior of excluded regions with merge commits. * This test has excluded and included regions, for files ending with .excluded and .included, * respectively. The git repository is set up so that a non-fast-forward merge commit comes * to master. The newly merged commit is a file ending with .excluded, so it should be ignored. * * @throws Exception on error */ @Issue({"JENKINS-20389","JENKINS-23606"}) @Test public void testMergeCommitInExcludedRegionIsIgnored() throws Exception { final String branchToMerge = "new-branch-we-merge-to-master"; FreeStyleProject project = setupProject("master", false, null, ".*\\.excluded", null, ".*\\.included"); final String initialCommit = "initialCommit"; commit(initialCommit, johnDoe, "Commit " + initialCommit + " to master"); build(project, Result.SUCCESS, initialCommit); final String secondCommit = "secondCommit"; commit(secondCommit, johnDoe, "Commit " + secondCommit + " to master"); testRepo.git.checkoutBranch(branchToMerge, "HEAD~"); final String fileToMerge = "fileToMerge.excluded"; commit(fileToMerge, johnDoe, "Commit should be ignored: " + fileToMerge + " to " + branchToMerge); ObjectId branchSHA = git.revParse("HEAD"); testRepo.git.checkoutBranch("master", "refs/heads/master"); MergeCommand mergeCommand = testRepo.git.merge(); mergeCommand.setRevisionToMerge(branchSHA); mergeCommand.execute(); // Should return false, because our merge commit falls within the excluded region. assertFalse("Polling should report no changes, because they are in the excluded region.", project.poll(listener).hasChanges()); } /** * testMergeCommitInExcludedDirectoryIsIgnored() confirms behavior of excluded directories with merge commits. * This test has excluded and included directories, named /excluded/ and /included/,respectively. The repository * is set up so that a non-fast-forward merge commit comes to master, and is in the directory /excluded/, * so it should be ignored. 
* * @throws Exception on error */ @Issue({"JENKINS-20389","JENKINS-23606"}) @Test public void testMergeCommitInExcludedDirectoryIsIgnored() throws Exception { final String branchToMerge = "new-branch-we-merge-to-master"; FreeStyleProject project = setupProject("master", false, null, "excluded/.*", null, "included/.*"); final String initialCommit = "initialCommit"; commit(initialCommit, johnDoe, "Commit " + initialCommit + " to master"); build(project, Result.SUCCESS, initialCommit); final String secondCommit = "secondCommit"; commit(secondCommit, johnDoe, "Commit " + secondCommit + " to master"); testRepo.git.checkoutBranch(branchToMerge, "HEAD~"); final String fileToMerge = "excluded/should-be-ignored"; commit(fileToMerge, johnDoe, "Commit should be ignored: " + fileToMerge + " to " + branchToMerge); ObjectId branchSHA = git.revParse("HEAD"); testRepo.git.checkoutBranch("master", "refs/heads/master"); MergeCommand mergeCommand = testRepo.git.merge(); mergeCommand.setRevisionToMerge(branchSHA); mergeCommand.execute(); // Should return false, because our merge commit falls within the excluded directory. assertFalse("Polling should see no changes, because they are in the excluded directory.", project.poll(listener).hasChanges()); } /** * testMergeCommitInIncludedRegionIsProcessed() confirms behavior of included regions with merge commits. * This test has excluded and included regions, for files ending with .excluded and .included, respectively. * The git repository is set up so that a non-fast-forward merge commit comes to master. The newly merged * commit is a file ending with .included, so it should be processed as a new change. * * @throws Exception on error */ @Issue({"JENKINS-20389","JENKINS-23606"}) @Test public void testMergeCommitInIncludedRegionIsProcessed() throws Exception { final String branchToMerge = "new-branch-we-merge-to-master"; FreeStyleProject project = setupProject("master", false, null, ".*\\.excluded", null, ".*\\.included"); final String initialCommit = "initialCommit"; commit(initialCommit, johnDoe, "Commit " + initialCommit + " to master"); build(project, Result.SUCCESS, initialCommit); final String secondCommit = "secondCommit"; commit(secondCommit, johnDoe, "Commit " + secondCommit + " to master"); testRepo.git.checkoutBranch(branchToMerge, "HEAD~"); final String fileToMerge = "fileToMerge.included"; commit(fileToMerge, johnDoe, "Commit should be noticed and processed as a change: " + fileToMerge + " to " + branchToMerge); ObjectId branchSHA = git.revParse("HEAD"); testRepo.git.checkoutBranch("master", "refs/heads/master"); MergeCommand mergeCommand = testRepo.git.merge(); mergeCommand.setRevisionToMerge(branchSHA); mergeCommand.execute(); // Should return true, because our commit falls within the included region. assertTrue("Polling should report changes, because they fall within the included region.", project.poll(listener).hasChanges()); } /** * testMergeCommitInIncludedRegionIsProcessed() confirms behavior of included directories with merge commits. * This test has excluded and included directories, named /excluded/ and /included/, respectively. The repository * is set up so that a non-fast-forward merge commit comes to master, and is in the directory /included/, * so it should be processed as a new change. 
* * @throws Exception on error */ @Issue({"JENKINS-20389","JENKINS-23606"}) @Test public void testMergeCommitInIncludedDirectoryIsProcessed() throws Exception { final String branchToMerge = "new-branch-we-merge-to-master"; FreeStyleProject project = setupProject("master", false, null, "excluded/.*", null, "included/.*"); final String initialCommit = "initialCommit"; commit(initialCommit, johnDoe, "Commit " + initialCommit + " to master"); build(project, Result.SUCCESS, initialCommit); final String secondCommit = "secondCommit"; commit(secondCommit, johnDoe, "Commit " + secondCommit + " to master"); testRepo.git.checkoutBranch(branchToMerge, "HEAD~"); final String fileToMerge = "included/should-be-processed"; commit(fileToMerge, johnDoe, "Commit should be noticed and processed as a change: " + fileToMerge + " to " + branchToMerge); ObjectId branchSHA = git.revParse("HEAD"); testRepo.git.checkoutBranch("master", "refs/heads/master"); MergeCommand mergeCommand = testRepo.git.merge(); mergeCommand.setRevisionToMerge(branchSHA); mergeCommand.execute(); // When this test passes, project.poll(listener).hasChanges()) should return // true, because our commit falls within the included region. assertTrue("Polling should report changes, because they are in the included directory.", project.poll(listener).hasChanges()); } /** * testMergeCommitOutsideIncludedRegionIsIgnored() confirms behavior of included regions with merge commits. * This test has an included region defined, for files ending with .included. There is no excluded region * defined. The repository is set up and a non-fast-forward merge commit comes to master. The newly merged commit * is a file ending with .should-be-ignored, thus falling outside of the included region, so it should ignored. * * @throws Exception on error */ @Issue({"JENKINS-20389","JENKINS-23606"}) @Test public void testMergeCommitOutsideIncludedRegionIsIgnored() throws Exception { final String branchToMerge = "new-branch-we-merge-to-master"; FreeStyleProject project = setupProject("master", false, null, null, null, ".*\\.included"); final String initialCommit = "initialCommit"; commit(initialCommit, johnDoe, "Commit " + initialCommit + " to master"); build(project, Result.SUCCESS, initialCommit); final String secondCommit = "secondCommit"; commit(secondCommit, johnDoe, "Commit " + secondCommit + " to master"); testRepo.git.checkoutBranch(branchToMerge, "HEAD~"); final String fileToMerge = "fileToMerge.should-be-ignored"; commit(fileToMerge, johnDoe, "Commit should be ignored: " + fileToMerge + " to " + branchToMerge); ObjectId branchSHA = git.revParse("HEAD"); testRepo.git.checkoutBranch("master", "refs/heads/master"); MergeCommand mergeCommand = testRepo.git.merge(); mergeCommand.setRevisionToMerge(branchSHA); mergeCommand.execute(); // Should return false, because our commit falls outside the included region. assertFalse("Polling should ignore the change, because it falls outside the included region.", project.poll(listener).hasChanges()); } /** * testMergeCommitOutsideIncludedDirectoryIsIgnored() confirms behavior of included directories with merge commits. * This test has only an included directory `/included` defined. The git repository is set up so that * a non-fast-forward, but mergeable, commit comes to master. The newly merged commit is outside of the * /included/ directory, so polling should report no changes. 
* * @throws Exception on error */ @Issue({"JENKINS-20389","JENKINS-23606"}) @Test public void testMergeCommitOutsideIncludedDirectoryIsIgnored() throws Exception { final String branchToMerge = "new-branch-we-merge-to-master"; FreeStyleProject project = setupProject("master", false, null, null, null, "included/.*"); final String initialCommit = "initialCommit"; commit(initialCommit, johnDoe, "Commit " + initialCommit + " to master"); build(project, Result.SUCCESS, initialCommit); final String secondCommit = "secondCommit"; commit(secondCommit, johnDoe, "Commit " + secondCommit + " to master"); testRepo.git.checkoutBranch(branchToMerge, "HEAD~"); final String fileToMerge = "directory-to-ignore/file-should-be-ignored"; commit(fileToMerge, johnDoe, "Commit should be ignored: " + fileToMerge + " to " + branchToMerge); ObjectId branchSHA = git.revParse("HEAD"); testRepo.git.checkoutBranch("master", "refs/heads/master"); MergeCommand mergeCommand = testRepo.git.merge(); mergeCommand.setRevisionToMerge(branchSHA); mergeCommand.execute(); // Should return false, because our commit falls outside of the included directory assertFalse("Polling should ignore the change, because it falls outside the included directory.", project.poll(listener).hasChanges()); } /** * testMergeCommitOutsideExcludedRegionIsProcessed() confirms behavior of excluded regions with merge commits. * This test has an excluded region defined, for files ending with .excluded. There is no included region defined. * The repository is set up so a non-fast-forward merge commit comes to master. The newly merged commit is a file * ending with .should-be-processed, thus falling outside of the excluded region, so it should processed * as a new change. * * @throws Exception on error */ @Issue({"JENKINS-20389","JENKINS-23606"}) @Test public void testMergeCommitOutsideExcludedRegionIsProcessed() throws Exception { final String branchToMerge = "new-branch-we-merge-to-master"; FreeStyleProject project = setupProject("master", false, null, ".*\\.excluded", null, null); final String initialCommit = "initialCommit"; commit(initialCommit, johnDoe, "Commit " + initialCommit + " to master"); build(project, Result.SUCCESS, initialCommit); final String secondCommit = "secondCommit"; commit(secondCommit, johnDoe, "Commit " + secondCommit + " to master"); testRepo.git.checkoutBranch(branchToMerge, "HEAD~"); final String fileToMerge = "fileToMerge.should-be-processed"; commit(fileToMerge, johnDoe, "Commit should be noticed and processed as a change: " + fileToMerge + " to " + branchToMerge); ObjectId branchSHA = git.revParse("HEAD"); testRepo.git.checkoutBranch("master", "refs/heads/master"); MergeCommand mergeCommand = testRepo.git.merge(); mergeCommand.setRevisionToMerge(branchSHA); mergeCommand.execute(); // Should return true, because our commit falls outside of the excluded region assertTrue("Polling should process the change, because it falls outside the excluded region.", project.poll(listener).hasChanges()); } /** * testMergeCommitOutsideExcludedDirectoryIsProcessed() confirms behavior of excluded directories with merge commits. * This test has an excluded directory `excluded` defined. There is no `included` directory defined. The repository * is set up so that a non-fast-forward merge commit comes to master. The newly merged commit resides in a * directory of its own, thus falling outside of the excluded directory, so it should processed * as a new change. 
* * @throws Exception on error */ @Issue({"JENKINS-20389","JENKINS-23606"}) @Test public void testMergeCommitOutsideExcludedDirectoryIsProcessed() throws Exception { final String branchToMerge = "new-branch-we-merge-to-master"; FreeStyleProject project = setupProject("master", false, null, "excluded/.*", null, null); final String initialCommit = "initialCommit"; commit(initialCommit, johnDoe, "Commit " + initialCommit + " to master"); build(project, Result.SUCCESS, initialCommit); final String secondCommit = "secondCommit"; commit(secondCommit, johnDoe, "Commit " + secondCommit + " to master"); testRepo.git.checkoutBranch(branchToMerge, "HEAD~"); // Create this new file outside of our excluded directory final String fileToMerge = "directory-to-include/file-should-be-processed"; commit(fileToMerge, johnDoe, "Commit should be noticed and processed as a change: " + fileToMerge + " to " + branchToMerge); ObjectId branchSHA = git.revParse("HEAD"); testRepo.git.checkoutBranch("master", "refs/heads/master"); MergeCommand mergeCommand = testRepo.git.merge(); mergeCommand.setRevisionToMerge(branchSHA); mergeCommand.execute(); // Should return true, because our commit falls outside of the excluded directory assertTrue("SCM polling should process the change, because it falls outside the excluded directory.", project.poll(listener).hasChanges()); } @Test public void testIncludedRegionWithDeeperCommits() throws Exception { FreeStyleProject project = setupProject("master", false, null, null, null, ".*3"); // create initial commit and then run the build against it: final String commitFile1 = "commitFile1"; commit(commitFile1, johnDoe, "Commit number 1"); build(project, Result.SUCCESS, commitFile1); assertFalse("scm polling should not detect any more changes after build", project.poll(listener).hasChanges()); final String commitFile2 = "commitFile2"; commit(commitFile2, janeDoe, "Commit number 2"); assertFalse("scm polling detected commit2 change, which should not have been included", project.poll(listener).hasChanges()); final String commitFile3 = "commitFile3"; commit(commitFile3, johnDoe, "Commit number 3"); final String commitFile4 = "commitFile4"; commit(commitFile4, janeDoe, "Commit number 4"); assertTrue("scm polling did not detect commit3 change", project.poll(listener).hasChanges()); //... and build it... 
final FreeStyleBuild build2 = build(project, Result.SUCCESS, commitFile2, commitFile3); final Set<User> culprits = build2.getCulprits(); assertEquals("The build should have two culprit", 2, culprits.size()); PersonIdent[] expected = {johnDoe, janeDoe}; assertCulprits("jane doe and john doe should be the culprits", culprits, expected); assertTrue(build2.getWorkspace().child(commitFile2).exists()); assertTrue(build2.getWorkspace().child(commitFile3).exists()); rule.assertBuildStatusSuccess(build2); assertFalse("scm polling should not detect any more changes after build", project.poll(listener).hasChanges()); } @Test public void testBasicExcludedRegion() throws Exception { FreeStyleProject project = setupProject("master", false, null, ".*2", null, null); // create initial commit and then run the build against it: final String commitFile1 = "commitFile1"; commit(commitFile1, johnDoe, "Commit number 1"); build(project, Result.SUCCESS, commitFile1); assertFalse("scm polling should not detect any more changes after build", project.poll(listener).hasChanges()); final String commitFile2 = "commitFile2"; commit(commitFile2, janeDoe, "Commit number 2"); assertFalse("scm polling detected commit2 change, which should have been excluded", project.poll(listener).hasChanges()); final String commitFile3 = "commitFile3"; commit(commitFile3, johnDoe, "Commit number 3"); assertTrue("scm polling did not detect commit3 change", project.poll(listener).hasChanges()); //... and build it... final FreeStyleBuild build2 = build(project, Result.SUCCESS, commitFile2, commitFile3); final Set<User> culprits = build2.getCulprits(); assertEquals("The build should have two culprit", 2, culprits.size()); PersonIdent[] expected = {johnDoe, janeDoe}; assertCulprits("jane doe and john doe should be the culprits", culprits, expected); assertTrue(build2.getWorkspace().child(commitFile2).exists()); assertTrue(build2.getWorkspace().child(commitFile3).exists()); rule.assertBuildStatusSuccess(build2); assertFalse("scm polling should not detect any more changes after build", project.poll(listener).hasChanges()); } private int findLogLineStartsWith(List<String> buildLog, String initialString) { int logLine = 0; for (String logString : buildLog) { if (logString.startsWith(initialString)) { return logLine; } logLine++; } return -1; } @Test public void testCleanBeforeCheckout() throws Exception { FreeStyleProject p = setupProject("master", false, null, null, "Jane Doe", null); ((GitSCM)p.getScm()).getExtensions().add(new CleanBeforeCheckout()); /* First build should not clean, since initial clone is always clean */ final String commitFile1 = "commitFile1"; commit(commitFile1, johnDoe, janeDoe, "Commit number 1"); final FreeStyleBuild firstBuild = build(p, Result.SUCCESS, commitFile1); assertThat(firstBuild.getLog(50), not(hasItem("Cleaning workspace"))); /* Second build should clean, since first build might have modified the workspace */ final String commitFile2 = "commitFile2"; commit(commitFile2, johnDoe, janeDoe, "Commit number 2"); final FreeStyleBuild secondBuild = build(p, Result.SUCCESS, commitFile2); List<String> secondLog = secondBuild.getLog(50); assertThat(secondLog, hasItem("Cleaning workspace")); int cleaningLogLine = findLogLineStartsWith(secondLog, "Cleaning workspace"); int fetchingLogLine = findLogLineStartsWith(secondLog, "Fetching upstream changes from "); assertThat("Cleaning should happen before fetch", cleaningLogLine, is(lessThan(fetchingLogLine))); } @Issue("JENKINS-8342") @Test public void 
testExcludedRegionMultiCommit() throws Exception { // Got 2 projects, each one should only build if changes in its own file FreeStyleProject clientProject = setupProject("master", false, null, ".*serverFile", null, null); FreeStyleProject serverProject = setupProject("master", false, null, ".*clientFile", null, null); String initialCommitFile = "initialFile"; commit(initialCommitFile, johnDoe, "initial commit"); build(clientProject, Result.SUCCESS, initialCommitFile); build(serverProject, Result.SUCCESS, initialCommitFile); assertFalse("scm polling should not detect any more changes after initial build", clientProject.poll(listener).hasChanges()); assertFalse("scm polling should not detect any more changes after initial build", serverProject.poll(listener).hasChanges()); // Got commits on serverFile, so only server project should build. commit("myserverFile", johnDoe, "commit first server file"); assertFalse("scm polling should not detect any changes in client project", clientProject.poll(listener).hasChanges()); assertTrue("scm polling did not detect changes in server project", serverProject.poll(listener).hasChanges()); // Got commits on both client and serverFile, so both projects should build. commit("myNewserverFile", johnDoe, "commit new server file"); commit("myclientFile", johnDoe, "commit first clientfile"); assertTrue("scm polling did not detect changes in client project", clientProject.poll(listener).hasChanges()); assertTrue("scm polling did not detect changes in server project", serverProject.poll(listener).hasChanges()); } /* * With multiple branches specified in the project and having commits from a user * excluded should not build the excluded revisions when another branch changes. */ /* @Issue("JENKINS-8342") @Test public void testMultipleBranchWithExcludedUser() throws Exception { final String branch1 = "Branch1"; final String branch2 = "Branch2"; List<BranchSpec> branches = new ArrayList<BranchSpec>(); branches.add(new BranchSpec("master")); branches.add(new BranchSpec(branch1)); branches.add(new BranchSpec(branch2)); final FreeStyleProject project = setupProject(branches, false, null, null, janeDoe.getName(), null, false, null); // create initial commit and then run the build against it: final String commitFile1 = "commitFile1"; commit(commitFile1, johnDoe, "Commit number 1"); build(project, Result.SUCCESS, commitFile1); assertFalse("scm polling should not detect any more changes after build", project.poll(listener).hasChanges()); // create branches here so we can get back to them later... git.branch(branch1); git.branch(branch2); final String commitFile2 = "commitFile2"; commit(commitFile2, johnDoe, "Commit number 2"); final String commitFile3 = "commitFile3"; commit(commitFile3, johnDoe, "Commit number 3"); assertTrue("scm polling should detect changes in 'master' branch", project.poll(listener).hasChanges()); build(project, Result.SUCCESS, commitFile1, commitFile2); assertFalse("scm polling should not detect any more changes after last build", project.poll(listener).hasChanges()); // Add excluded commit final String commitFile4 = "commitFile4"; commit(commitFile4, janeDoe, "Commit number 4"); assertFalse("scm polling detected change in 'master', which should have been excluded", project.poll(listener).hasChanges()); // now jump back... 
git.checkout(branch1); final String branch1File1 = "branch1File1"; commit(branch1File1, janeDoe, "Branch1 commit number 1"); assertFalse("scm polling detected change in 'Branch1', which should have been excluded", project.poll(listener).hasChanges()); // and the other branch... git.checkout(branch2); final String branch2File1 = "branch2File1"; commit(branch2File1, janeDoe, "Branch2 commit number 1"); assertFalse("scm polling detected change in 'Branch2', which should have been excluded", project.poll(listener).hasChanges()); final String branch2File2 = "branch2File2"; commit(branch2File2, johnDoe, "Branch2 commit number 2"); assertTrue("scm polling should detect changes in 'Branch2' branch", project.poll(listener).hasChanges()); //... and build it... build(project, Result.SUCCESS, branch2File1, branch2File2); assertFalse("scm polling should not detect any more changes after build", project.poll(listener).hasChanges()); // now jump back again... git.checkout(branch1); // Commit excluded after non-excluded commit, should trigger build. final String branch1File2 = "branch1File2"; commit(branch1File2, johnDoe, "Branch1 commit number 2"); final String branch1File3 = "branch1File3"; commit(branch1File3, janeDoe, "Branch1 commit number 3"); assertTrue("scm polling should detect changes in 'Branch1' branch", project.poll(listener).hasChanges()); build(project, Result.SUCCESS, branch1File1, branch1File2, branch1File3); } */ @Test public void testBasicExcludedUser() throws Exception { FreeStyleProject project = setupProject("master", false, null, null, "Jane Doe", null); // create initial commit and then run the build against it: final String commitFile1 = "commitFile1"; commit(commitFile1, johnDoe, "Commit number 1"); build(project, Result.SUCCESS, commitFile1); assertFalse("scm polling should not detect any more changes after build", project.poll(listener).hasChanges()); final String commitFile2 = "commitFile2"; commit(commitFile2, janeDoe, "Commit number 2"); assertFalse("scm polling detected commit2 change, which should have been excluded", project.poll(listener).hasChanges()); final String commitFile3 = "commitFile3"; commit(commitFile3, johnDoe, "Commit number 3"); assertTrue("scm polling did not detect commit3 change", project.poll(listener).hasChanges()); //... and build it... 
final FreeStyleBuild build2 = build(project, Result.SUCCESS, commitFile2, commitFile3); final Set<User> culprits = build2.getCulprits(); assertEquals("The build should have two culprits", 2, culprits.size()); PersonIdent[] expected = {johnDoe, janeDoe}; assertCulprits("jane doe and john doe should be the culprits", culprits, expected); assertTrue(build2.getWorkspace().child(commitFile2).exists()); assertTrue(build2.getWorkspace().child(commitFile3).exists()); rule.assertBuildStatusSuccess(build2); assertFalse("scm polling should not detect any more changes after build", project.poll(listener).hasChanges()); } @Test public void testBasicInSubdir() throws Exception { FreeStyleProject project = setupSimpleProject("master"); ((GitSCM)project.getScm()).getExtensions().add(new RelativeTargetDirectory("subdir")); // create initial commit and then run the build against it: final String commitFile1 = "commitFile1"; commit(commitFile1, johnDoe, "Commit number 1"); build(project, "subdir", Result.SUCCESS, commitFile1); assertFalse("scm polling should not detect any more changes after build", project.poll(listener).hasChanges()); final String commitFile2 = "commitFile2"; commit(commitFile2, janeDoe, "Commit number 2"); assertTrue("scm polling did not detect commit2 change", project.poll(listener).hasChanges()); //... and build it... final FreeStyleBuild build2 = build(project, "subdir", Result.SUCCESS, commitFile2); final Set<User> culprits = build2.getCulprits(); assertEquals("The build should have only one culprit", 1, culprits.size()); assertEquals("", janeDoe.getName(), culprits.iterator().next().getFullName()); assertEquals("The workspace should have a 'subdir' subdirectory, but does not.", true, build2.getWorkspace().child("subdir").exists()); assertEquals("The 'subdir' subdirectory should contain commitFile2, but does not.", true, build2.getWorkspace().child("subdir").child(commitFile2).exists()); rule.assertBuildStatusSuccess(build2); assertFalse("scm polling should not detect any more changes after build", project.poll(listener).hasChanges()); } @Test public void testBasicWithAgent() throws Exception { FreeStyleProject project = setupSimpleProject("master"); project.setAssignedLabel(rule.createSlave().getSelfLabel()); // create initial commit and then run the build against it: final String commitFile1 = "commitFile1"; commit(commitFile1, johnDoe, "Commit number 1"); build(project, Result.SUCCESS, commitFile1); assertFalse("scm polling should not detect any more changes after build", project.poll(listener).hasChanges()); final String commitFile2 = "commitFile2"; commit(commitFile2, janeDoe, "Commit number 2"); assertTrue("scm polling did not detect commit2 change", project.poll(listener).hasChanges()); //... and build it...
final FreeStyleBuild build2 = build(project, Result.SUCCESS, commitFile2); final Set<User> culprits = build2.getCulprits(); assertEquals("The build should have only one culprit", 1, culprits.size()); assertEquals("", janeDoe.getName(), culprits.iterator().next().getFullName()); assertTrue(build2.getWorkspace().child(commitFile2).exists()); rule.assertBuildStatusSuccess(build2); assertFalse("scm polling should not detect any more changes after build", project.poll(listener).hasChanges()); } @Issue("HUDSON-7547") @Test public void testBasicWithAgentNoExecutorsOnMaster() throws Exception { FreeStyleProject project = setupSimpleProject("master"); rule.jenkins.setNumExecutors(0); project.setAssignedLabel(rule.createSlave().getSelfLabel()); // create initial commit and then run the build against it: final String commitFile1 = "commitFile1"; commit(commitFile1, johnDoe, "Commit number 1"); build(project, Result.SUCCESS, commitFile1); assertFalse("scm polling should not detect any more changes after build", project.poll(listener).hasChanges()); final String commitFile2 = "commitFile2"; commit(commitFile2, janeDoe, "Commit number 2"); assertTrue("scm polling did not detect commit2 change", project.poll(listener).hasChanges()); //... and build it... final FreeStyleBuild build2 = build(project, Result.SUCCESS, commitFile2); final Set<User> culprits = build2.getCulprits(); assertEquals("The build should have only one culprit", 1, culprits.size()); assertEquals("", janeDoe.getName(), culprits.iterator().next().getFullName()); assertTrue(build2.getWorkspace().child(commitFile2).exists()); rule.assertBuildStatusSuccess(build2); assertFalse("scm polling should not detect any more changes after build", project.poll(listener).hasChanges()); } @Test public void testAuthorOrCommitterFalse() throws Exception { // Test with authorOrCommitter set to false and make sure we get the committer. FreeStyleProject project = setupSimpleProject("master"); // create initial commit and then run the build against it: final String commitFile1 = "commitFile1"; commit(commitFile1, johnDoe, janeDoe, "Commit number 1"); final FreeStyleBuild firstBuild = build(project, Result.SUCCESS, commitFile1); assertFalse("scm polling should not detect any more changes after build", project.poll(listener).hasChanges()); final String commitFile2 = "commitFile2"; commit(commitFile2, johnDoe, janeDoe, "Commit number 2"); assertTrue("scm polling did not detect commit2 change", project.poll(listener).hasChanges()); final FreeStyleBuild secondBuild = build(project, Result.SUCCESS, commitFile2); assertFalse("scm polling should not detect any more changes after build", project.poll(listener).hasChanges()); final Set<User> secondCulprits = secondBuild.getCulprits(); assertEquals("The build should have only one culprit", 1, secondCulprits.size()); assertEquals("Did not get the committer as the change author with authorOrCommitter==false", janeDoe.getName(), secondCulprits.iterator().next().getFullName()); } @Test public void testAuthorOrCommitterTrue() throws Exception { // Next, test with authorOrCommitter set to true and make sure we get the author. 
FreeStyleProject project = setupSimpleProject("master"); ((GitSCM)project.getScm()).getExtensions().add(new AuthorInChangelog()); // create initial commit and then run the build against it: final String commitFile1 = "commitFile1"; commit(commitFile1, johnDoe, janeDoe, "Commit number 1"); final FreeStyleBuild firstBuild = build(project, Result.SUCCESS, commitFile1); assertFalse("scm polling should not detect any more changes after build", project.poll(listener).hasChanges()); final String commitFile2 = "commitFile2"; commit(commitFile2, johnDoe, janeDoe, "Commit number 2"); assertTrue("scm polling did not detect commit2 change", project.poll(listener).hasChanges()); final FreeStyleBuild secondBuild = build(project, Result.SUCCESS, commitFile2); assertFalse("scm polling should not detect any more changes after build", project.poll(listener).hasChanges()); final Set<User> secondCulprits = secondBuild.getCulprits(); assertEquals("The build should have only one culprit", 1, secondCulprits.size()); assertEquals("Did not get the author as the change author with authorOrCommitter==true", johnDoe.getName(), secondCulprits.iterator().next().getFullName()); } @Test public void testNewCommitToUntrackedBranchDoesNotTriggerBuild() throws Exception { FreeStyleProject project = setupSimpleProject("master"); // create initial commit and then run the build against it: final String commitFile1 = "commitFile1"; commit(commitFile1, johnDoe, "Commit number 1"); build(project, Result.SUCCESS, commitFile1); //now create and checkout a new branch: git.checkout(Constants.HEAD, "untracked"); //.. and commit to it: final String commitFile2 = "commitFile2"; commit(commitFile2, johnDoe, "Commit number 2"); assertFalse("scm polling should not detect commit2 change because it is not in the branch we are tracking.", project.poll(listener).hasChanges()); } private String checkoutString(FreeStyleProject project, String envVar) { return "checkout -f " + getEnvVars(project).get(envVar); } @Test public void testEnvVarsAvailable() throws Exception { FreeStyleProject project = setupSimpleProject("master"); final String commitFile1 = "commitFile1"; commit(commitFile1, johnDoe, "Commit number 1"); FreeStyleBuild build1 = build(project, Result.SUCCESS, commitFile1); assertEquals("origin/master", getEnvVars(project).get(GitSCM.GIT_BRANCH)); rule.waitForMessage(getEnvVars(project).get(GitSCM.GIT_BRANCH), build1); rule.waitForMessage(checkoutString(project, GitSCM.GIT_COMMIT), build1); final String commitFile2 = "commitFile2"; commit(commitFile2, johnDoe, "Commit number 2"); FreeStyleBuild build2 = build(project, Result.SUCCESS, commitFile2); rule.assertLogNotContains(checkoutString(project, GitSCM.GIT_PREVIOUS_COMMIT), build2); rule.waitForMessage(checkoutString(project, GitSCM.GIT_PREVIOUS_COMMIT), build1); rule.assertLogNotContains(checkoutString(project, GitSCM.GIT_PREVIOUS_SUCCESSFUL_COMMIT), build2); rule.waitForMessage(checkoutString(project, GitSCM.GIT_PREVIOUS_SUCCESSFUL_COMMIT), build1); } @Issue("HUDSON-7411") @Test public void testNodeEnvVarsAvailable() throws Exception { FreeStyleProject project = setupSimpleProject("master"); DumbSlave agent = rule.createSlave(); setVariables(agent, new Entry("TESTKEY", "agent value")); project.setAssignedLabel(agent.getSelfLabel()); final String commitFile1 = "commitFile1"; commit(commitFile1, johnDoe, "Commit number 1"); build(project, Result.SUCCESS, commitFile1); assertEquals("agent value", getEnvVars(project).get("TESTKEY")); } @Test public void testNodeOverrideGit() throws 
Exception { GitSCM scm = new GitSCM(null); DumbSlave agent = rule.createSlave(); GitTool.DescriptorImpl gitToolDescriptor = rule.jenkins.getDescriptorByType(GitTool.DescriptorImpl.class); GitTool installation = new GitTool("Default", "/usr/bin/git", null); gitToolDescriptor.setInstallations(installation); String gitExe = scm.getGitExe(agent, TaskListener.NULL); assertEquals("/usr/bin/git", gitExe); ToolLocationNodeProperty nodeGitLocation = new ToolLocationNodeProperty(new ToolLocationNodeProperty.ToolLocation(gitToolDescriptor, "Default", "C:\\Program Files\\Git\\bin\\git.exe")); agent.setNodeProperties(Collections.singletonList(nodeGitLocation)); gitExe = scm.getGitExe(agent, TaskListener.NULL); assertEquals("C:\\Program Files\\Git\\bin\\git.exe", gitExe); } /* * A previous version of GitSCM would only build against branches, not tags. This test checks that that * regression has been fixed. */ @Test public void testGitSCMCanBuildAgainstTags() throws Exception { final String mytag = "mytag"; FreeStyleProject project = setupSimpleProject(mytag); build(project, Result.FAILURE); // fail, because there's nothing to be checked out here final String commitFile1 = "commitFile1"; commit(commitFile1, johnDoe, "Commit number 1"); // Try again. The first build will leave the repository in a bad state because we // cloned something without even a HEAD - which will mean it will want to re-clone once there is some // actual data. build(project, Result.FAILURE); // fail, because there's nothing to be checked out here //now create and checkout a new branch: final String tmpBranch = "tmp"; git.branch(tmpBranch); git.checkout(tmpBranch); // commit to it final String commitFile2 = "commitFile2"; commit(commitFile2, johnDoe, "Commit number 2"); assertFalse("scm polling should not detect any more changes since mytag is untouched right now", project.poll(listener).hasChanges()); build(project, Result.FAILURE); // fail, because there's nothing to be checked out here // tag it, then delete the tmp branch git.tag(mytag, "mytag initial"); git.checkout("master"); git.deleteBranch(tmpBranch); // at this point we're back on master, there are no other branches, tag "mytag" exists but is // not part of "master" assertTrue("scm polling should detect commit2 change in 'mytag'", project.poll(listener).hasChanges()); build(project, Result.SUCCESS, commitFile2); assertFalse("scm polling should not detect any more changes after last build", project.poll(listener).hasChanges()); // now, create tmp branch again against mytag: git.checkout(mytag); git.branch(tmpBranch); // another commit: final String commitFile3 = "commitFile3"; commit(commitFile3, johnDoe, "Commit number 3"); assertFalse("scm polling should not detect any more changes since mytag is untouched right now", project.poll(listener).hasChanges()); // now we're going to force mytag to point to the new commit, if everything goes well, gitSCM should pick the change up: git.tag(mytag, "mytag moved"); git.checkout("master"); git.deleteBranch(tmpBranch); // at this point we're back on master, there are no other branches, "mytag" has been updated to a new commit: assertTrue("scm polling should detect commit3 change in 'mytag'", project.poll(listener).hasChanges()); build(project, Result.SUCCESS, commitFile3); assertFalse("scm polling should not detect any more changes after last build", project.poll(listener).hasChanges()); } /* * Not specifying a branch string in the project implies that we should be polling for changes in * all branches. 
*/ @Test public void testMultipleBranchBuild() throws Exception { // empty string will result in a project that tracks against changes in all branches: final FreeStyleProject project = setupSimpleProject(""); final String commitFile1 = "commitFile1"; commit(commitFile1, johnDoe, "Commit number 1"); build(project, Result.SUCCESS, commitFile1); // create a branch here so we can get back to this point later... final String fork = "fork"; git.branch(fork); final String commitFile2 = "commitFile2"; commit(commitFile2, johnDoe, "Commit number 2"); final String commitFile3 = "commitFile3"; commit(commitFile3, johnDoe, "Commit number 3"); assertTrue("scm polling should detect changes in 'master' branch", project.poll(listener).hasChanges()); build(project, Result.SUCCESS, commitFile1, commitFile2); assertFalse("scm polling should not detect any more changes after last build", project.poll(listener).hasChanges()); // now jump back... git.checkout(fork); // add some commits to the fork branch... final String forkFile1 = "forkFile1"; commit(forkFile1, johnDoe, "Fork commit number 1"); final String forkFile2 = "forkFile2"; commit(forkFile2, johnDoe, "Fork commit number 2"); assertTrue("scm polling should detect changes in 'fork' branch", project.poll(listener).hasChanges()); build(project, Result.SUCCESS, forkFile1, forkFile2); assertFalse("scm polling should not detect any more changes after last build", project.poll(listener).hasChanges()); } @Test public void testMultipleBranchesWithTags() throws Exception { List<BranchSpec> branchSpecs = Arrays.asList( new BranchSpec("refs/tags/v*"), new BranchSpec("refs/remotes/origin/non-existent")); FreeStyleProject project = setupProject(branchSpecs, false, null, null, janeDoe.getName(), null, false, null); // create initial commit and then run the build against it: // Here the changelog is by default empty (because the changelog for the first commit is always empty) commit("commitFileBase", johnDoe, "Initial Commit"); // there are no branches to be built FreeStyleBuild freeStyleBuild = build(project, Result.FAILURE); final String v1 = "v1"; git.tag(v1, "version 1"); assertTrue("v1 tag exists", git.tagExists(v1)); freeStyleBuild = build(project, Result.SUCCESS); assertTrue("change set is empty", freeStyleBuild.getChangeSet().isEmptySet()); commit("file1", johnDoe, "change to file1"); git.tag("none", "latest"); freeStyleBuild = build(project, Result.SUCCESS); ObjectId tag = git.revParse(Constants.R_TAGS + v1); GitSCM scm = (GitSCM)project.getScm(); BuildData buildData = scm.getBuildData(freeStyleBuild); assertEquals("last build matches the v1 tag revision", tag, buildData.lastBuild.getSHA1()); } @Issue("JENKINS-19037") @SuppressWarnings("ResultOfObjectAllocationIgnored") @Test public void testBlankRepositoryName() throws Exception { new GitSCM(null); } @Issue("JENKINS-10060") @Test public void testSubmoduleFixup() throws Exception { /* Unreliable on Windows and not a platform-specific test */ Assume.assumeFalse(isWindows()); File repo = secondRepo.getRoot(); FilePath moduleWs = new FilePath(repo); org.jenkinsci.plugins.gitclient.GitClient moduleRepo = Git.with(listener, new EnvVars()).in(repo).getClient(); { // first we create a Git repository with a submodule moduleRepo.init(); moduleWs.child("a").touch(0); moduleRepo.add("a"); moduleRepo.commit("creating a module"); git.addSubmodule(repo.getAbsolutePath(), "module1"); git.commit("creating a super project"); } // configure two projects 'u' -> 'd' that are chained together.
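// GitRevisionBuildParameters, used here together with the parameterized-trigger plugin's BuildTrigger, should hand the exact revision built by 'u' down to 'd', so the downstream job checks out the same commit rather than whatever the branch tip is by then.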
FreeStyleProject u = createFreeStyleProject(); FreeStyleProject d = createFreeStyleProject(); u.setScm(new GitSCM(workDir.getPath())); u.getPublishersList().add(new BuildTrigger(new hudson.plugins.parameterizedtrigger.BuildTriggerConfig(d.getName(), ResultCondition.SUCCESS, new GitRevisionBuildParameters()))); d.setScm(new GitSCM(workDir.getPath())); rule.jenkins.rebuildDependencyGraph(); FreeStyleBuild ub = rule.buildAndAssertSuccess(u); for (int i=0; (d.getLastBuild()==null || d.getLastBuild().isBuilding()) && i<100; i++) // wait only up to 10 sec to avoid infinite loop Thread.sleep(100); FreeStyleBuild db = d.getLastBuild(); assertNotNull("downstream build didn't happen",db); db = rule.waitForCompletion(db); rule.assertBuildStatusSuccess(db); } @Test public void testBuildChooserContext() throws Exception { final FreeStyleProject p = createFreeStyleProject(); final FreeStyleBuild b = rule.buildAndAssertSuccess(p); BuildChooserContextImpl c = new BuildChooserContextImpl(p, b, null); c.actOnBuild(new ContextCallable<Run<?,?>, Object>() { public Object invoke(Run param, VirtualChannel channel) throws IOException, InterruptedException { assertSame(param,b); return null; } }); c.actOnProject(new ContextCallable<Job<?,?>, Object>() { public Object invoke(Job param, VirtualChannel channel) throws IOException, InterruptedException { assertSame(param,p); return null; } }); DumbSlave agent = rule.createOnlineSlave(); assertEquals(p.toString(), agent.getChannel().call(new BuildChooserContextTestCallable(c))); } private static class BuildChooserContextTestCallable extends MasterToSlaveCallable<String,IOException> { private final BuildChooserContext c; public BuildChooserContextTestCallable(BuildChooserContext c) { this.c = c; } public String call() throws IOException { try { return c.actOnProject(new ContextCallable<Job<?,?>, String>() { public String invoke(Job<?,?> param, VirtualChannel channel) throws IOException, InterruptedException { assertTrue(channel instanceof Channel); assertTrue(Jenkins.getInstanceOrNull()!=null); return param.toString(); } }); } catch (InterruptedException e) { throw new IOException(e); } } } // eg: "jane doe and john doe should be the culprits", culprits, [johnDoe, janeDoe]) static public void assertCulprits(String assertMsg, Set<User> actual, PersonIdent[] expected) { List<String> fullNames = actual.stream().map(User::getFullName).collect(Collectors.toList()); for(PersonIdent p : expected) { assertTrue(assertMsg, fullNames.contains(p.getName())); } } @Test public void testHideCredentials() throws Exception { FreeStyleProject project = setupSimpleProject("master"); store.addCredentials(Domain.global(), createCredential(CredentialsScope.GLOBAL, "github")); // setup global config List<UserRemoteConfig> remoteConfigs = GitSCM.createRepoList("https://github.com/jenkinsci/git-plugin", "github"); project.setScm(new GitSCM(remoteConfigs, Collections.singletonList(new BranchSpec("master")), false, null, null, null, null)); GitSCM scm = (GitSCM) project.getScm(); final DescriptorImpl descriptor = (DescriptorImpl) scm.getDescriptor(); assertFalse("Wrong initial value for hide credentials", scm.isHideCredentials()); descriptor.setHideCredentials(true); assertTrue("Hide credentials not set", scm.isHideCredentials()); /* Exit test early if running on Windows and path will be too long */ /* Known limitation of git for Windows 2.28.0 and earlier */ /* Needs a longpath fix in git for Windows */ String currentDirectoryPath = new File(".").getCanonicalPath(); if (isWindows() && 
currentDirectoryPath.length() > 95) { return; } descriptor.setHideCredentials(false); final String commitFile1 = "commitFile1"; commit(commitFile1, johnDoe, "Commit number 1"); build(project, Result.SUCCESS); List<String> logLines = project.getLastBuild().getLog(100); assertThat(logLines, hasItem("using credential github")); descriptor.setHideCredentials(true); build(project, Result.SUCCESS); logLines = project.getLastBuild().getLog(100); assertThat(logLines, not(hasItem("using credential github"))); } @Test public void testEmailCommitter() throws Exception { FreeStyleProject project = setupSimpleProject("master"); // setup global config GitSCM scm = (GitSCM) project.getScm(); final DescriptorImpl descriptor = (DescriptorImpl) scm.getDescriptor(); assertFalse("Wrong initial value for create account based on e-mail", scm.isCreateAccountBasedOnEmail()); descriptor.setCreateAccountBasedOnEmail(true); assertTrue("Create account based on e-mail not set", scm.isCreateAccountBasedOnEmail()); assertFalse("Wrong initial value for use existing user if same e-mail already found", scm.isUseExistingAccountWithSameEmail()); descriptor.setUseExistingAccountWithSameEmail(true); assertTrue("Use existing user if same e-mail already found is not set", scm.isUseExistingAccountWithSameEmail()); // create initial commit and then run the build against it: final String commitFile1 = "commitFile1"; commit(commitFile1, johnDoe, "Commit number 1"); final FreeStyleBuild build = build(project, Result.SUCCESS, commitFile1); assertFalse("scm polling should not detect any more changes after build", project.poll(listener).hasChanges()); final String commitFile2 = "commitFile2"; final PersonIdent jeffDoe = new PersonIdent("Jeff Doe", "jeff@doe.com"); commit(commitFile2, jeffDoe, "Commit number 2"); assertTrue("scm polling did not detect commit2 change", project.poll(listener).hasChanges()); //... and build it... 
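// With createAccountBasedOnEmail enabled above, the culprit created for this commit is expected to use the committer's e-mail address as the user id while keeping the real name, which the assertions below verify.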
final FreeStyleBuild build2 = build(project, Result.SUCCESS, commitFile2); final Set<User> culprits = build2.getCulprits(); assertEquals("The build should have only one culprit", 1, culprits.size()); User culprit = culprits.iterator().next(); assertEquals("", jeffDoe.getEmailAddress(), culprit.getId()); assertEquals("", jeffDoe.getName(), culprit.getFullName()); rule.assertBuildStatusSuccess(build); } @Issue("JENKINS-59868") @Test public void testNonExistentWorkingDirectoryPoll() throws Exception { FreeStyleProject project = setupSimpleProject("master"); // create initial commit and then run the build against it final String commitFile1 = "commitFile1"; commit(commitFile1, johnDoe, "Commit number 1"); project.setScm(new GitSCM( ((GitSCM)project.getScm()).getUserRemoteConfigs(), Collections.singletonList(new BranchSpec("master")), null, null, // configure GitSCM with the DisableRemotePoll extension to ensure that polling uses the workspace Collections.singletonList(new DisableRemotePoll()))); FreeStyleBuild build1 = build(project, Result.SUCCESS, commitFile1); // Empty the workspace directory build1.getWorkspace().deleteRecursive(); // Set up a recorder for polling logs RingBufferLogHandler pollLogHandler = new RingBufferLogHandler(10); Logger pollLogger = Logger.getLogger(GitSCMTest.class.getName()); pollLogger.addHandler(pollLogHandler); TaskListener taskListener = new LogTaskListener(pollLogger, Level.INFO); // Make sure that polling returns BUILD_NOW and properly logs the reason FilePath filePath = build1.getWorkspace(); assertThat(project.getScm().compareRemoteRevisionWith(project, new Launcher.LocalLauncher(taskListener), filePath, taskListener, null), is(PollingResult.BUILD_NOW)); assertTrue(pollLogHandler.getView().stream().anyMatch(m -> m.getMessage().contains("[poll] Working Directory does not exist"))); } // Disabled - consistently fails, needs more analysis // @Test public void testFetchFromMultipleRepositories() throws Exception { FreeStyleProject project = setupSimpleProject("master"); TestGitRepo secondTestRepo = new TestGitRepo("second", secondRepo.getRoot(), listener); List<UserRemoteConfig> remotes = new ArrayList<>(); remotes.addAll(testRepo.remoteConfigs()); remotes.addAll(secondTestRepo.remoteConfigs()); project.setScm(new GitSCM( remotes, Collections.singletonList(new BranchSpec("master")), null, null, Collections.<GitSCMExtension>emptyList())); // create initial commit and then run the build against it: final String commitFile1 = "commitFile1"; commit(commitFile1, johnDoe, "Commit number 1"); build(project, Result.SUCCESS, commitFile1); /* Diagnostic help - for later use */ SCMRevisionState baseline = project.poll(listener).baseline; Change change = project.poll(listener).change; SCMRevisionState remote = project.poll(listener).remote; String assertionMessage = MessageFormat.format("polling incorrectly detected change after build. Baseline: {0}, Change: {1}, Remote: {2}", baseline, change, remote); assertFalse(assertionMessage, project.poll(listener).hasChanges()); final String commitFile2 = "commitFile2"; secondTestRepo.commit(commitFile2, janeDoe, "Commit number 2"); assertTrue("scm polling did not detect commit2 change", project.poll(listener).hasChanges()); //... and build it...
final FreeStyleBuild build2 = build(project, Result.SUCCESS, commitFile2); assertTrue(build2.getWorkspace().child(commitFile2).exists()); rule.assertBuildStatusSuccess(build2); assertFalse("scm polling should not detect any more changes after build", project.poll(listener).hasChanges()); } private void branchSpecWithMultipleRepositories(String branchName) throws Exception { FreeStyleProject project = setupSimpleProject("master"); TestGitRepo secondTestRepo = new TestGitRepo("second", secondRepo.getRoot(), listener); List<UserRemoteConfig> remotes = new ArrayList<>(); remotes.addAll(testRepo.remoteConfigs()); remotes.addAll(secondTestRepo.remoteConfigs()); // create initial commit final String commitFile1 = "commitFile1"; commit(commitFile1, johnDoe, "Commit number 1"); project.setScm(new GitSCM( remotes, Collections.singletonList(new BranchSpec(branchName)), null, null, Collections.<GitSCMExtension>emptyList())); final FreeStyleBuild build = build(project, Result.SUCCESS, commitFile1); rule.assertBuildStatusSuccess(build); } @Issue("JENKINS-26268") public void testBranchSpecAsSHA1WithMultipleRepositories() throws Exception { branchSpecWithMultipleRepositories(testRepo.git.revParse("HEAD").getName()); } @Issue("JENKINS-26268") public void testBranchSpecAsRemotesOriginMasterWithMultipleRepositories() throws Exception { branchSpecWithMultipleRepositories("remotes/origin/master"); } @Issue("JENKINS-25639") @Test public void testCommitDetectedOnlyOnceInMultipleRepositories() throws Exception { FreeStyleProject project = setupSimpleProject("master"); TestGitRepo secondTestRepo = new TestGitRepo("secondRepo", secondRepo.getRoot(), listener); List<UserRemoteConfig> remotes = new ArrayList<>(); remotes.addAll(testRepo.remoteConfigs()); remotes.addAll(secondTestRepo.remoteConfigs()); GitSCM gitSCM = new GitSCM( remotes, Collections.singletonList(new BranchSpec("origin/master")), null, null, Collections.<GitSCMExtension>emptyList()); project.setScm(gitSCM); /* Check that polling would force build through * compareRemoteRevisionWith by detecting no last build */ FilePath filePath = new FilePath(new File(".")); assertThat(gitSCM.compareRemoteRevisionWith(project, new Launcher.LocalLauncher(listener), filePath, listener, null), is(PollingResult.BUILD_NOW)); commit("commitFile1", johnDoe, "Commit number 1"); FreeStyleBuild build = build(project, Result.SUCCESS, "commitFile1"); commit("commitFile2", johnDoe, "Commit number 2"); git = Git.with(listener, new EnvVars()).in(build.getWorkspace()).getClient(); for (RemoteConfig remoteConfig : gitSCM.getRepositories()) { git.fetch_().from(remoteConfig.getURIs().get(0), remoteConfig.getFetchRefSpecs()); } BuildChooser buildChooser = gitSCM.getBuildChooser(); Collection<Revision> candidateRevisions = buildChooser.getCandidateRevisions(false, "origin/master", git, listener, project.getLastBuild().getAction(BuildData.class), null); assertEquals(1, candidateRevisions.size()); gitSCM.setBuildChooser(buildChooser); // Should be a no-op Collection<Revision> candidateRevisions2 = buildChooser.getCandidateRevisions(false, "origin/master", git, listener, project.getLastBuild().getAction(BuildData.class), null); assertThat(candidateRevisions2, is(candidateRevisions)); } private final Random random = new Random(); private boolean useChangelogToBranch = random.nextBoolean(); private void addChangelogToBranchExtension(GitSCM scm) { if (useChangelogToBranch) { /* Changelog should be no different with this enabled or disabled */ ChangelogToBranchOptions changelogOptions = new 
ChangelogToBranchOptions("origin", "master"); scm.getExtensions().add(new ChangelogToBranch(changelogOptions)); } useChangelogToBranch = !useChangelogToBranch; } @Test public void testMerge() throws Exception { FreeStyleProject project = setupSimpleProject("master"); GitSCM scm = new GitSCM( createRemoteRepositories(), Collections.singletonList(new BranchSpec("*")), null, null, Collections.<GitSCMExtension>emptyList()); scm.getExtensions().add(new PreBuildMerge(new UserMergeOptions("origin", "integration", "default", MergeCommand.GitPluginFastForwardMode.FF))); addChangelogToBranchExtension(scm); project.setScm(scm); // create initial commit and then run the build against it: commit("commitFileBase", johnDoe, "Initial Commit"); testRepo.git.branch("integration"); build(project, Result.SUCCESS, "commitFileBase"); testRepo.git.checkout(null, "topic1"); final String commitFile1 = "commitFile1"; commit(commitFile1, johnDoe, "Commit number 1"); final FreeStyleBuild build1 = build(project, Result.SUCCESS, commitFile1); assertTrue(build1.getWorkspace().child(commitFile1).exists()); assertFalse("scm polling should not detect any more changes after build", project.poll(listener).hasChanges()); // do what the GitPublisher would do testRepo.git.deleteBranch("integration"); testRepo.git.checkout("topic1", "integration"); testRepo.git.checkout("master", "topic2"); final String commitFile2 = "commitFile2"; commit(commitFile2, johnDoe, "Commit number 2"); assertTrue("scm polling did not detect commit2 change", project.poll(listener).hasChanges()); final FreeStyleBuild build2 = build(project, Result.SUCCESS, commitFile2); assertTrue(build2.getWorkspace().child(commitFile2).exists()); rule.assertBuildStatusSuccess(build2); assertFalse("scm polling should not detect any more changes after build", project.poll(listener).hasChanges()); } @Issue("JENKINS-20392") @Test public void testMergeChangelog() throws Exception { FreeStyleProject project = setupSimpleProject("master"); GitSCM scm = new GitSCM( createRemoteRepositories(), Collections.singletonList(new BranchSpec("*")), null, null, Collections.<GitSCMExtension>emptyList()); scm.getExtensions().add(new PreBuildMerge(new UserMergeOptions("origin", "integration", "default", MergeCommand.GitPluginFastForwardMode.FF))); addChangelogToBranchExtension(scm); project.setScm(scm); // create initial commit and then run the build against it: // Here the changelog is by default empty (because changelog for first commit is always empty commit("commitFileBase", johnDoe, "Initial Commit"); testRepo.git.branch("integration"); build(project, Result.SUCCESS, "commitFileBase"); // Create second commit and run build // Here the changelog should contain exactly this one new commit testRepo.git.checkout("master", "topic2"); final String commitFile2 = "commitFile2"; String commitMessage = "Commit number 2"; commit(commitFile2, johnDoe, commitMessage); final FreeStyleBuild build2 = build(project, Result.SUCCESS, commitFile2); ChangeLogSet<? 
extends ChangeLogSet.Entry> changeLog = build2.getChangeSet(); assertEquals("Changelog should contain one item", 1, changeLog.getItems().length); GitChangeSet singleChange = (GitChangeSet) changeLog.getItems()[0]; assertEquals("Changelog should contain commit number 2", commitMessage, singleChange.getComment().trim()); } @Test public void testMergeWithAgent() throws Exception { FreeStyleProject project = setupSimpleProject("master"); project.setAssignedLabel(rule.createSlave().getSelfLabel()); GitSCM scm = new GitSCM( createRemoteRepositories(), Collections.singletonList(new BranchSpec("*")), null, null, Collections.<GitSCMExtension>emptyList()); scm.getExtensions().add(new PreBuildMerge(new UserMergeOptions("origin", "integration", null, null))); addChangelogToBranchExtension(scm); project.setScm(scm); // create initial commit and then run the build against it: commit("commitFileBase", johnDoe, "Initial Commit"); testRepo.git.branch("integration"); build(project, Result.SUCCESS, "commitFileBase"); testRepo.git.checkout(null, "topic1"); final String commitFile1 = "commitFile1"; commit(commitFile1, johnDoe, "Commit number 1"); final FreeStyleBuild build1 = build(project, Result.SUCCESS, commitFile1); assertTrue(build1.getWorkspace().child(commitFile1).exists()); assertFalse("scm polling should not detect any more changes after build", project.poll(listener).hasChanges()); // do what the GitPublisher would do testRepo.git.deleteBranch("integration"); testRepo.git.checkout("topic1", "integration"); testRepo.git.checkout("master", "topic2"); final String commitFile2 = "commitFile2"; commit(commitFile2, johnDoe, "Commit number 2"); assertTrue("scm polling did not detect commit2 change", project.poll(listener).hasChanges()); final FreeStyleBuild build2 = build(project, Result.SUCCESS, commitFile2); assertTrue(build2.getWorkspace().child(commitFile2).exists()); rule.assertBuildStatusSuccess(build2); assertFalse("scm polling should not detect any more changes after build", project.poll(listener).hasChanges()); } @Test public void testMergeFailed() throws Exception { FreeStyleProject project = setupSimpleProject("master"); GitSCM scm = new GitSCM( createRemoteRepositories(), Collections.singletonList(new BranchSpec("*")), null, null, Collections.<GitSCMExtension>emptyList()); project.setScm(scm); scm.getExtensions().add(new PreBuildMerge(new UserMergeOptions("origin", "integration", "", MergeCommand.GitPluginFastForwardMode.FF))); addChangelogToBranchExtension(scm); // create initial commit and then run the build against it: commit("commitFileBase", johnDoe, "Initial Commit"); testRepo.git.branch("integration"); build(project, Result.SUCCESS, "commitFileBase"); testRepo.git.checkout(null, "topic1"); final String commitFile1 = "commitFile1"; commit(commitFile1, johnDoe, "Commit number 1"); final FreeStyleBuild build1 = build(project, Result.SUCCESS, commitFile1); assertTrue(build1.getWorkspace().child(commitFile1).exists()); assertFalse("scm polling should not detect any more changes after build", project.poll(listener).hasChanges()); // do what the GitPublisher would do testRepo.git.deleteBranch("integration"); testRepo.git.checkout("topic1", "integration"); testRepo.git.checkout("master", "topic2"); commit(commitFile1, "other content", johnDoe, "Commit number 2"); assertTrue("scm polling did not detect commit2 change", project.poll(listener).hasChanges()); rule.buildAndAssertStatus(Result.FAILURE, project); assertFalse("scm polling should not detect any more changes after build", 
project.poll(listener).hasChanges()); } @Issue("JENKINS-25191") @Test public void testMultipleMergeFailed() throws Exception { FreeStyleProject project = setupSimpleProject("master"); GitSCM scm = new GitSCM( createRemoteRepositories(), Collections.singletonList(new BranchSpec("master")), null, null, Collections.<GitSCMExtension>emptyList()); project.setScm(scm); scm.getExtensions().add(new PreBuildMerge(new UserMergeOptions("origin", "integration1", "", MergeCommand.GitPluginFastForwardMode.FF))); scm.getExtensions().add(new PreBuildMerge(new UserMergeOptions("origin", "integration2", "", MergeCommand.GitPluginFastForwardMode.FF))); addChangelogToBranchExtension(scm); commit("dummyFile", johnDoe, "Initial Commit"); testRepo.git.branch("integration1"); testRepo.git.branch("integration2"); build(project, Result.SUCCESS); final String commitFile = "commitFile"; testRepo.git.checkoutBranch("integration1","master"); commit(commitFile,"abc", johnDoe, "merge conflict with integration2"); testRepo.git.checkoutBranch("integration2","master"); commit(commitFile,"cde", johnDoe, "merge conflict with integration1"); final FreeStyleBuild build = build(project, Result.FAILURE); assertFalse("scm polling should not detect any more changes after build", project.poll(listener).hasChanges()); } @Test public void testMergeFailedWithAgent() throws Exception { FreeStyleProject project = setupSimpleProject("master"); project.setAssignedLabel(rule.createSlave().getSelfLabel()); GitSCM scm = new GitSCM( createRemoteRepositories(), Collections.singletonList(new BranchSpec("*")), null, null, Collections.<GitSCMExtension>emptyList()); scm.getExtensions().add(new PreBuildMerge(new UserMergeOptions("origin", "integration", null, null))); addChangelogToBranchExtension(scm); project.setScm(scm); // create initial commit and then run the build against it: commit("commitFileBase", johnDoe, "Initial Commit"); testRepo.git.branch("integration"); build(project, Result.SUCCESS, "commitFileBase"); testRepo.git.checkout(null, "topic1"); final String commitFile1 = "commitFile1"; commit(commitFile1, johnDoe, "Commit number 1"); final FreeStyleBuild build1 = build(project, Result.SUCCESS, commitFile1); assertTrue(build1.getWorkspace().child(commitFile1).exists()); assertFalse("scm polling should not detect any more changes after build", project.poll(listener).hasChanges()); // do what the GitPublisher would do testRepo.git.deleteBranch("integration"); testRepo.git.checkout("topic1", "integration"); testRepo.git.checkout("master", "topic2"); commit(commitFile1, "other content", johnDoe, "Commit number 2"); assertTrue("scm polling did not detect commit2 change", project.poll(listener).hasChanges()); rule.buildAndAssertStatus(Result.FAILURE, project); assertFalse("scm polling should not detect any more changes after build", project.poll(listener).hasChanges()); } @Test public void testMergeWithMatrixBuild() throws Exception { //Create a matrix project and a couple of axes MatrixProject project = rule.jenkins.createProject(MatrixProject.class, "xyz"); project.setAxes(new AxisList(new Axis("VAR","a","b"))); GitSCM scm = new GitSCM( createRemoteRepositories(), Collections.singletonList(new BranchSpec("*")), null, null, Collections.<GitSCMExtension>emptyList()); scm.getExtensions().add(new PreBuildMerge(new UserMergeOptions("origin", "integration", null, null))); addChangelogToBranchExtension(scm); project.setScm(scm); // create initial commit and then run the build against it: commit("commitFileBase", johnDoe, "Initial Commit"); 
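// From here on this mirrors testMerge, but drives the same pre-build merge flow through a multi-configuration (matrix) build.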
testRepo.git.branch("integration"); build(project, Result.SUCCESS, "commitFileBase"); testRepo.git.checkout(null, "topic1"); final String commitFile1 = "commitFile1"; commit(commitFile1, johnDoe, "Commit number 1"); final MatrixBuild build1 = build(project, Result.SUCCESS, commitFile1); assertTrue(build1.getWorkspace().child(commitFile1).exists()); assertFalse("scm polling should not detect any more changes after build", project.poll(listener).hasChanges()); // do what the GitPublisher would do testRepo.git.deleteBranch("integration"); testRepo.git.checkout("topic1", "integration"); testRepo.git.checkout("master", "topic2"); final String commitFile2 = "commitFile2"; commit(commitFile2, johnDoe, "Commit number 2"); assertTrue("scm polling did not detect commit2 change", project.poll(listener).hasChanges()); final MatrixBuild build2 = build(project, Result.SUCCESS, commitFile2); assertTrue(build2.getWorkspace().child(commitFile2).exists()); rule.assertBuildStatusSuccess(build2); assertFalse("scm polling should not detect any more changes after build", project.poll(listener).hasChanges()); } @Test public void testEnvironmentVariableExpansion() throws Exception { FreeStyleProject project = createFreeStyleProject(); project.setScm(new GitSCM("${CAT}"+testRepo.gitDir.getPath())); // create initial commit and then run the build against it: commit("a.txt", johnDoe, "Initial Commit"); build(project, Result.SUCCESS, "a.txt"); PollingResult r = project.poll(StreamTaskListener.fromStdout()); assertFalse(r.hasChanges()); commit("b.txt", johnDoe, "Another commit"); r = project.poll(StreamTaskListener.fromStdout()); assertTrue(r.hasChanges()); build(project, Result.SUCCESS, "b.txt"); } @TestExtension("testEnvironmentVariableExpansion") public static class SupplySomeEnvVars extends EnvironmentContributor { @Override public void buildEnvironmentFor(Run r, EnvVars envs, TaskListener listener) throws IOException, InterruptedException { envs.put("CAT",""); } } private List<UserRemoteConfig> createRepoList(String url) { List<UserRemoteConfig> repoList = new ArrayList<>(); repoList.add(new UserRemoteConfig(url, null, null, null)); return repoList; } /* * Makes sure that git browser URL is preserved across config round trip. */ @Issue("JENKINS-22604") @Test public void testConfigRoundtripURLPreserved() throws Exception { /* Long running test of low value on Windows */ /* Only run on non-Windows and approximately 50% of test runs */ /* On Windows, it requires 24 seconds before test finishes */ if (isWindows() || random.nextBoolean()) { return; } FreeStyleProject p = createFreeStyleProject(); final String url = "https://github.com/jenkinsci/jenkins"; GitRepositoryBrowser browser = new GithubWeb(url); GitSCM scm = new GitSCM(createRepoList(url), Collections.singletonList(new BranchSpec("")), browser, null, null); p.setScm(scm); rule.configRoundtrip(p); rule.assertEqualDataBoundBeans(scm,p.getScm()); assertEquals("Wrong key", "git " + url, scm.getKey()); } /* * Makes sure that git extensions are preserved across config round trip. 
*/ @Issue("JENKINS-33695") @Test public void testConfigRoundtripExtensionsPreserved() throws Exception { /* Long running test of low value on Windows */ /* Only run on non-Windows and approximately 50% of test runs */ /* On Windows, it requires 26 seconds before test finishes */ if (isWindows() || random.nextBoolean()) { return; } FreeStyleProject p = createFreeStyleProject(); final String url = "https://github.com/jenkinsci/git-plugin.git"; GitRepositoryBrowser browser = new GithubWeb(url); GitSCM scm = new GitSCM(createRepoList(url), Collections.singletonList(new BranchSpec("*/master")), browser, null, null); p.setScm(scm); /* Assert that no extensions are loaded initially */ assertEquals(Collections.emptyList(), scm.getExtensions().toList()); /* Add LocalBranch extension */ LocalBranch localBranchExtension = new LocalBranch("**"); scm.getExtensions().add(localBranchExtension); assertTrue(scm.getExtensions().toList().contains(localBranchExtension)); /* Save the configuration */ rule.configRoundtrip(p); List<GitSCMExtension> extensions = scm.getExtensions().toList();; assertTrue(extensions.contains(localBranchExtension)); assertEquals("Wrong extension count before reload", 1, extensions.size()); /* Reload configuration from disc */ p.doReload(); GitSCM reloadedGit = (GitSCM) p.getScm(); List<GitSCMExtension> reloadedExtensions = reloadedGit.getExtensions().toList(); assertEquals("Wrong extension count after reload", 1, reloadedExtensions.size()); LocalBranch reloadedLocalBranch = (LocalBranch) reloadedExtensions.get(0); assertEquals(localBranchExtension.getLocalBranch(), reloadedLocalBranch.getLocalBranch()); } /* * Makes sure that the configuration form works. */ @Test public void testConfigRoundtrip() throws Exception { /* Long running test of low value on Windows */ /* Only run on non-Windows and approximately 50% of test runs */ /* On Windows, it requires 20 seconds before test finishes */ if (isWindows() || random.nextBoolean()) { return; } FreeStyleProject p = createFreeStyleProject(); GitSCM scm = new GitSCM("https://github.com/jenkinsci/jenkins"); p.setScm(scm); rule.configRoundtrip(p); rule.assertEqualDataBoundBeans(scm,p.getScm()); } /* * Sample configuration that should result in no extensions at all */ @Test public void testDataCompatibility1() throws Exception { FreeStyleProject p = (FreeStyleProject) rule.jenkins.createProjectFromXML("foo", getClass().getResourceAsStream("GitSCMTest/old1.xml")); GitSCM oldGit = (GitSCM) p.getScm(); assertEquals(Collections.emptyList(), oldGit.getExtensions().toList()); assertEquals(0, oldGit.getSubmoduleCfg().size()); assertEquals("git https://github.com/jenkinsci/model-ant-project.git", oldGit.getKey()); assertThat(oldGit.getEffectiveBrowser(), instanceOf(GithubWeb.class)); GithubWeb browser = (GithubWeb) oldGit.getEffectiveBrowser(); assertEquals(browser.getRepoUrl(), "https://github.com/jenkinsci/model-ant-project.git/"); } /** * Test a pipeline getting the value from several checkout steps gets the latest data everytime. 
* @throws Exception if anything goes wrong */ @Issue("JENKINS-53346") @Test public void testCheckoutReturnsLatestValues() throws Exception { /* Exit test early if running on Windows and path will be too long */ /* Known limitation of git for Windows 2.28.0 and earlier */ /* Needs a longpath fix in git for Windows */ String currentDirectoryPath = new File(".").getCanonicalPath(); if (isWindows() && currentDirectoryPath.length() > 95) { return; } WorkflowJob p = rule.jenkins.createProject(WorkflowJob.class, "pipeline-checkout-3-tags"); p.setDefinition(new CpsFlowDefinition( "node {\n" + " def tokenBranch = ''\n" + " def tokenRevision = ''\n" + " def checkout1 = checkout([$class: 'GitSCM', branches: [[name: 'git-1.1']], extensions: [], userRemoteConfigs: [[url: 'https://github.com/jenkinsci/git-plugin.git']]])\n" + " echo \"checkout1: ${checkout1}\"\n" + " tokenBranch = tm '${GIT_BRANCH}'\n" + " tokenRevision = tm '${GIT_REVISION}'\n" + " echo \"token1: ${tokenBranch}\"\n" + " echo \"revision1: ${tokenRevision}\"\n" + " def checkout2 = checkout([$class: 'GitSCM', branches: [[name: 'git-2.0.2']], extensions: [], userRemoteConfigs: [[url: 'https://github.com/jenkinsci/git-plugin.git']]])\n" + " echo \"checkout2: ${checkout2}\"\n" + " tokenBranch = tm '${GIT_BRANCH,all=true}'\n" + " tokenRevision = tm '${GIT_REVISION,length=8}'\n" + " echo \"token2: ${tokenBranch}\"\n" + " echo \"revision2: ${tokenRevision}\"\n" + " def checkout3 = checkout([$class: 'GitSCM', branches: [[name: 'git-3.0.0']], extensions: [], userRemoteConfigs: [[url: 'https://github.com/jenkinsci/git-plugin.git']]])\n" + " echo \"checkout3: ${checkout3}\"\n" + " tokenBranch = tm '${GIT_BRANCH,fullName=true}'\n" + " tokenRevision = tm '${GIT_REVISION,length=6}'\n" + " echo \"token3: ${tokenBranch}\"\n" + " echo \"revision3: ${tokenRevision}\"\n" + "}", true)); WorkflowRun b = rule.buildAndAssertSuccess(p); String log = b.getLog(); // The getLineStartsWith is used to ease reading the test failure and to avoid Hamcrest showing the whole log assertThat(getLineStartsWith(log, "checkout1:"), containsString("checkout1: [GIT_BRANCH:git-1.1, GIT_COMMIT:82db9509c068f60c41d7a4572c0114cc6d23cd0d, GIT_URL:https://github.com/jenkinsci/git-plugin.git]")); assertThat(getLineStartsWith(log, "checkout2:"), containsString("checkout2: [GIT_BRANCH:git-2.0.2, GIT_COMMIT:377a0fdbfbf07f70a3e9a566d749b2a185909c33, GIT_URL:https://github.com/jenkinsci/git-plugin.git]")); assertThat(getLineStartsWith(log, "checkout3:"), containsString("checkout3: [GIT_BRANCH:git-3.0.0, GIT_COMMIT:858dee578b79ac6683419faa57a281ccb9d347aa, GIT_URL:https://github.com/jenkinsci/git-plugin.git]")); assertThat(getLineStartsWith(log, "token1:"), containsString("token1: git-1.1")); assertThat(getLineStartsWith(log, "token2:"), containsString("token2: git-1.1")); // Unexpected but current behavior assertThat(getLineStartsWith(log, "token3:"), containsString("token3: git-1.1")); // Unexpected but current behavior assertThat(getLineStartsWith(log, "revision1:"), containsString("revision1: 82db9509c068f60c41d7a4572c0114cc6d23cd0d")); assertThat(getLineStartsWith(log, "revision2:"), containsString("revision2: 82db9509")); // Unexpected but current behavior - should be 377a0fdb assertThat(getLineStartsWith(log, "revision3:"), containsString("revision3: 82db95")); // Unexpected but current behavior - should be 858dee } private String getLineStartsWith(String text, String startOfLine) { try (Scanner scanner = new Scanner(text)) { while(scanner.hasNextLine()) { String line = scanner.nextLine(); if
(line.startsWith(startOfLine)) { return line; } } } return ""; } @Test public void testPleaseDontContinueAnyway() throws Exception { /* Wastes time waiting for the build to fail */ /* Only run on non-Windows and approximately 50% of test runs */ /* On Windows, it requires 150 seconds before test finishes */ if (isWindows() || random.nextBoolean()) { return; } // populate the empty repository with some commits testRepo.commit("a","foo",johnDoe, "added"); FreeStyleProject p = createFreeStyleProject(); p.setScm(new GitSCM(testRepo.gitDir.getAbsolutePath())); rule.buildAndAssertSuccess(p); // this should fail as it fails to fetch p.setScm(new GitSCM("http://localhost:4321/no/such/repository.git")); rule.buildAndAssertStatus(Result.FAILURE, p); } @Issue("JENKINS-19108") @Test public void testCheckoutToSpecificBranch() throws Exception { FreeStyleProject p = createFreeStyleProject(); GitSCM oldGit = new GitSCM("https://github.com/jenkinsci/model-ant-project.git/"); setupJGit(oldGit); oldGit.getExtensions().add(new LocalBranch("master")); p.setScm(oldGit); FreeStyleBuild b = rule.buildAndAssertSuccess(p); GitClient gc = Git.with(StreamTaskListener.fromStdout(),null).in(b.getWorkspace()).getClient(); gc.withRepository(new RepositoryCallback<Void>() { public Void invoke(Repository repo, VirtualChannel channel) throws IOException, InterruptedException { Ref head = repo.findRef("HEAD"); assertTrue("Detached HEAD",head.isSymbolic()); Ref t = head.getTarget(); assertEquals(t.getName(),"refs/heads/master"); return null; } }); } /** * Verifies that if the project specifies LocalBranch with a value of "**", * the checkout is made to a local branch named after the remote branch, sans 'origin'. * This feature is necessary to support Maven release builds that push updated * pom.xml to the remote branch as * <pre> * git push origin localbranch:localbranch * </pre> * @throws Exception on error */ @Test public void testCheckoutToDefaultLocalBranch_StarStar() throws Exception { FreeStyleProject project = setupSimpleProject("master"); final String commitFile1 = "commitFile1"; commit(commitFile1, johnDoe, "Commit number 1"); GitSCM git = (GitSCM)project.getScm(); git.getExtensions().add(new LocalBranch("**")); FreeStyleBuild build1 = build(project, Result.SUCCESS, commitFile1); assertEquals("GIT_BRANCH", "origin/master", getEnvVars(project).get(GitSCM.GIT_BRANCH)); assertEquals("GIT_LOCAL_BRANCH", "master", getEnvVars(project).get(GitSCM.GIT_LOCAL_BRANCH)); } /** * Verifies that if the project specifies LocalBranch with a null value (empty string), * the checkout is made to a local branch named after the remote branch, sans 'origin'. * This feature is necessary to support Maven release builds that push updated * pom.xml to the remote branch as * <pre> * git push origin localbranch:localbranch * </pre> * @throws Exception on error */ @Test public void testCheckoutToDefaultLocalBranch_NULL() throws Exception { FreeStyleProject project = setupSimpleProject("master"); final String commitFile1 = "commitFile1"; commit(commitFile1, johnDoe, "Commit number 1"); GitSCM git = (GitSCM)project.getScm(); git.getExtensions().add(new LocalBranch("")); FreeStyleBuild build1 = build(project, Result.SUCCESS, commitFile1); assertEquals("GIT_BRANCH", "origin/master", getEnvVars(project).get(GitSCM.GIT_BRANCH)); assertEquals("GIT_LOCAL_BRANCH", "master", getEnvVars(project).get(GitSCM.GIT_LOCAL_BRANCH)); } /* * Verifies that GIT_LOCAL_BRANCH is not set if LocalBranch extension * is not configured.
*/ @Test public void testCheckoutSansLocalBranchExtension() throws Exception { FreeStyleProject project = setupSimpleProject("master"); final String commitFile1 = "commitFile1"; commit(commitFile1, johnDoe, "Commit number 1"); FreeStyleBuild build1 = build(project, Result.SUCCESS, commitFile1); assertEquals("GIT_BRANCH", "origin/master", getEnvVars(project).get(GitSCM.GIT_BRANCH)); assertEquals("GIT_LOCAL_BRANCH", null, getEnvVars(project).get(GitSCM.GIT_LOCAL_BRANCH)); } /* * Verifies that GIT_CHECKOUT_DIR is set to "checkoutDir" if RelativeTargetDirectory extension * is configured. */ @Test public void testCheckoutRelativeTargetDirectoryExtension() throws Exception { FreeStyleProject project = setupProject("master", false, "checkoutDir"); final String commitFile1 = "commitFile1"; commit(commitFile1, johnDoe, "Commit number 1"); GitSCM git = (GitSCM)project.getScm(); git.getExtensions().add(new RelativeTargetDirectory("checkoutDir")); FreeStyleBuild build1 = build(project, "checkoutDir", Result.SUCCESS, commitFile1); assertEquals("GIT_CHECKOUT_DIR", "checkoutDir", getEnvVars(project).get(GitSCM.GIT_CHECKOUT_DIR)); } /* * Verifies that GIT_CHECKOUT_DIR is not set if RelativeTargetDirectory extension * is not configured. */ @Test public void testCheckoutSansRelativeTargetDirectoryExtension() throws Exception { FreeStyleProject project = setupSimpleProject("master"); final String commitFile1 = "commitFile1"; commit(commitFile1, johnDoe, "Commit number 1"); FreeStyleBuild build1 = build(project, Result.SUCCESS, commitFile1); assertEquals("GIT_CHECKOUT_DIR", null, getEnvVars(project).get(GitSCM.GIT_CHECKOUT_DIR)); } @Test public void testCheckoutFailureIsRetryable() throws Exception { FreeStyleProject project = setupSimpleProject("master"); // run build first to create workspace final String commitFile1 = "commitFile1"; commit(commitFile1, johnDoe, "Commit number 1"); final FreeStyleBuild build1 = build(project, Result.SUCCESS, commitFile1); final String commitFile2 = "commitFile2"; commit(commitFile2, janeDoe, "Commit number 2"); // create lock file to simulate lock collision File lock = new File(build1.getWorkspace().getRemote(), ".git/index.lock"); try { FileUtils.touch(lock); final FreeStyleBuild build2 = build(project, Result.FAILURE); rule.waitForMessage("java.io.IOException: Could not checkout", build2); } finally { lock.delete(); } } @Test public void testInitSparseCheckout() throws Exception { if (!sampleRepo.gitVersionAtLeast(1, 7, 10)) { /* Older git versions have unexpected behaviors with sparse checkout */ return; } FreeStyleProject project = setupProject("master", Collections.singletonList(new SparseCheckoutPath("toto"))); // run build first to create workspace final String commitFile1 = "toto/commitFile1"; commit(commitFile1, johnDoe, "Commit number 1"); final String commitFile2 = "titi/commitFile2"; commit(commitFile2, johnDoe, "Commit number 2"); final FreeStyleBuild build1 = build(project, Result.SUCCESS); assertTrue(build1.getWorkspace().child("toto").exists()); assertTrue(build1.getWorkspace().child(commitFile1).exists()); assertFalse(build1.getWorkspace().child("titi").exists()); assertFalse(build1.getWorkspace().child(commitFile2).exists()); } @Test public void testInitSparseCheckoutBis() throws Exception { if (!sampleRepo.gitVersionAtLeast(1, 7, 10)) { /* Older git versions have unexpected behaviors with sparse checkout */ return; } FreeStyleProject project = setupProject("master", Collections.singletonList(new SparseCheckoutPath("titi"))); // run build first to create 
workspace final String commitFile1 = "toto/commitFile1"; commit(commitFile1, johnDoe, "Commit number 1"); final String commitFile2 = "titi/commitFile2"; commit(commitFile2, johnDoe, "Commit number 2"); final FreeStyleBuild build1 = build(project, Result.SUCCESS); assertTrue(build1.getWorkspace().child("titi").exists()); assertTrue(build1.getWorkspace().child(commitFile2).exists()); assertFalse(build1.getWorkspace().child("toto").exists()); assertFalse(build1.getWorkspace().child(commitFile1).exists()); } @Test public void testSparseCheckoutAfterNormalCheckout() throws Exception { if (!sampleRepo.gitVersionAtLeast(1, 7, 10)) { /* Older git versions have unexpected behaviors with sparse checkout */ return; } FreeStyleProject project = setupSimpleProject("master"); // run build first to create workspace final String commitFile1 = "toto/commitFile1"; commit(commitFile1, johnDoe, "Commit number 1"); final String commitFile2 = "titi/commitFile2"; commit(commitFile2, johnDoe, "Commit number 2"); final FreeStyleBuild build1 = build(project, Result.SUCCESS); assertTrue(build1.getWorkspace().child("titi").exists()); assertTrue(build1.getWorkspace().child(commitFile2).exists()); assertTrue(build1.getWorkspace().child("toto").exists()); assertTrue(build1.getWorkspace().child(commitFile1).exists()); ((GitSCM) project.getScm()).getExtensions().add(new SparseCheckoutPaths(Collections.singletonList(new SparseCheckoutPath("titi")))); final FreeStyleBuild build2 = build(project, Result.SUCCESS); assertTrue(build2.getWorkspace().child("titi").exists()); assertTrue(build2.getWorkspace().child(commitFile2).exists()); assertFalse(build2.getWorkspace().child("toto").exists()); assertFalse(build2.getWorkspace().child(commitFile1).exists()); } @Test public void testNormalCheckoutAfterSparseCheckout() throws Exception { if (!sampleRepo.gitVersionAtLeast(1, 7, 10)) { /* Older git versions have unexpected behaviors with sparse checkout */ return; } FreeStyleProject project = setupProject("master", Collections.singletonList(new SparseCheckoutPath("titi"))); // run build first to create workspace final String commitFile1 = "toto/commitFile1"; commit(commitFile1, johnDoe, "Commit number 1"); final String commitFile2 = "titi/commitFile2"; commit(commitFile2, johnDoe, "Commit number 2"); final FreeStyleBuild build2 = build(project, Result.SUCCESS); assertTrue(build2.getWorkspace().child("titi").exists()); assertTrue(build2.getWorkspace().child(commitFile2).exists()); assertFalse(build2.getWorkspace().child("toto").exists()); assertFalse(build2.getWorkspace().child(commitFile1).exists()); ((GitSCM) project.getScm()).getExtensions().remove(SparseCheckoutPaths.class); final FreeStyleBuild build1 = build(project, Result.SUCCESS); assertTrue(build1.getWorkspace().child("titi").exists()); assertTrue(build1.getWorkspace().child(commitFile2).exists()); assertTrue(build1.getWorkspace().child("toto").exists()); assertTrue(build1.getWorkspace().child(commitFile1).exists()); } @Test public void testInitSparseCheckoutOverAgent() throws Exception { if (!sampleRepo.gitVersionAtLeast(1, 7, 10)) { /* Older git versions have unexpected behaviors with sparse checkout */ return; } FreeStyleProject project = setupProject("master", Collections.singletonList(new SparseCheckoutPath("titi"))); project.setAssignedLabel(rule.createSlave().getSelfLabel()); // run build first to create workspace final String commitFile1 = "toto/commitFile1"; commit(commitFile1, johnDoe, "Commit number 1"); final String commitFile2 = "titi/commitFile2"; 
commit(commitFile2, johnDoe, "Commit number 2"); final FreeStyleBuild build1 = build(project, Result.SUCCESS); assertTrue(build1.getWorkspace().child("titi").exists()); assertTrue(build1.getWorkspace().child(commitFile2).exists()); assertFalse(build1.getWorkspace().child("toto").exists()); assertFalse(build1.getWorkspace().child(commitFile1).exists()); } @Test @Issue("JENKINS-22009") public void testPolling_environmentValueInBranchSpec() throws Exception { // create parameterized project with environment value in branch specification FreeStyleProject project = createFreeStyleProject(); GitSCM scm = new GitSCM( createRemoteRepositories(), Collections.singletonList(new BranchSpec("${MY_BRANCH}")), null, null, Collections.<GitSCMExtension>emptyList()); project.setScm(scm); project.addProperty(new ParametersDefinitionProperty(new StringParameterDefinition("MY_BRANCH", "master"))); // commit something in order to create an initial base version in git commit("toto/commitFile1", johnDoe, "Commit number 1"); // build the project build(project, Result.SUCCESS); assertFalse("No changes to git since last build, thus no new build is expected", project.poll(listener).hasChanges()); } @Issue("JENKINS-29066") public void baseTestPolling_parentHead(List<GitSCMExtension> extensions) throws Exception { // create parameterized project with environment value in branch specification FreeStyleProject project = createFreeStyleProject(); GitSCM scm = new GitSCM( createRemoteRepositories(), Collections.singletonList(new BranchSpec("**")), null, null, extensions); project.setScm(scm); // commit something in order to create an initial base version in git commit("toto/commitFile1", johnDoe, "Commit number 1"); git.branch("someBranch"); commit("toto/commitFile2", johnDoe, "Commit number 2"); assertTrue("polling should detect changes",project.poll(listener).hasChanges()); // build the project build(project, Result.SUCCESS); /* Expects 1 build because the build of someBranch incorporates all * the changes from the master branch as well as the changes from someBranch. 
*/ assertEquals("Wrong number of builds", 1, project.getBuilds().size()); assertFalse("polling should not detect changes",project.poll(listener).hasChanges()); } @Issue("JENKINS-29066") @Test public void testPolling_parentHead() throws Exception { baseTestPolling_parentHead(Collections.<GitSCMExtension>emptyList()); } @Issue("JENKINS-29066") @Test public void testPolling_parentHead_DisableRemotePoll() throws Exception { baseTestPolling_parentHead(Collections.<GitSCMExtension>singletonList(new DisableRemotePoll())); } @Test public void testPollingAfterManualBuildWithParametrizedBranchSpec() throws Exception { // create parameterized project with environment value in branch specification FreeStyleProject project = createFreeStyleProject(); GitSCM scm = new GitSCM( createRemoteRepositories(), Collections.singletonList(new BranchSpec("${MY_BRANCH}")), null, null, Collections.<GitSCMExtension>emptyList()); project.setScm(scm); project.addProperty(new ParametersDefinitionProperty(new StringParameterDefinition("MY_BRANCH", "trackedbranch"))); // Initial commit to master commit("file1", johnDoe, "Initial Commit"); // Create the branches git.branch("trackedbranch"); git.branch("manualbranch"); final StringParameterValue branchParam = new StringParameterValue("MY_BRANCH", "manualbranch"); final Action[] actions = {new ParametersAction(branchParam)}; FreeStyleBuild build = project.scheduleBuild2(0, new Cause.UserIdCause(), actions).get(); rule.assertBuildStatus(Result.SUCCESS, build); assertFalse("No changes to git since last build", project.poll(listener).hasChanges()); git.checkout("manualbranch"); commit("file2", johnDoe, "Commit to manually build branch"); assertFalse("No changes to tracked branch", project.poll(listener).hasChanges()); git.checkout("trackedbranch"); commit("file3", johnDoe, "Commit to tracked branch"); assertTrue("A change should be detected in tracked branch", project.poll(listener).hasChanges()); } private final class FakeParametersAction implements EnvironmentContributingAction, Serializable { // Test class for testPolling_environmentValueAsEnvironmentContributingAction test case final ParametersAction m_forwardingAction; public FakeParametersAction(StringParameterValue params) { this.m_forwardingAction = new ParametersAction(params); } @Deprecated public void buildEnvVars(AbstractBuild<?, ?> ab, EnvVars ev) { this.m_forwardingAction.buildEnvVars(ab, ev); } public String getIconFileName() { return this.m_forwardingAction.getIconFileName(); } public String getDisplayName() { return this.m_forwardingAction.getDisplayName(); } public String getUrlName() { return this.m_forwardingAction.getUrlName(); } public List<ParameterValue> getParameters() { return this.m_forwardingAction.getParameters(); } private void writeObject(java.io.ObjectOutputStream out) throws IOException { } private void readObject(java.io.ObjectInputStream in) throws IOException, ClassNotFoundException { } private void readObjectNoData() throws ObjectStreamException { } } @Test public void testPolling_CanDoRemotePollingIfOneBranchButMultipleRepositories() throws Exception { FreeStyleProject project = createFreeStyleProject(); List<UserRemoteConfig> remoteConfigs = new ArrayList<>(); remoteConfigs.add(new UserRemoteConfig(testRepo.gitDir.getAbsolutePath(), "origin", "", null)); remoteConfigs.add(new UserRemoteConfig(testRepo.gitDir.getAbsolutePath(), "someOtherRepo", "", null)); GitSCM scm = new GitSCM(remoteConfigs, Collections.singletonList(new BranchSpec("origin/master")), false, Collections.<SubmoduleConfig> 
emptyList(), null, null, Collections.<GitSCMExtension> emptyList()); project.setScm(scm); commit("commitFile1", johnDoe, "Commit number 1"); FreeStyleBuild first_build = project.scheduleBuild2(0, new Cause.UserIdCause()).get(); rule.assertBuildStatus(Result.SUCCESS, first_build); first_build.getWorkspace().deleteContents(); PollingResult pollingResult = scm.poll(project, null, first_build.getWorkspace(), listener, null); assertFalse(pollingResult.hasChanges()); } @Issue("JENKINS-24467") @Test public void testPolling_environmentValueAsEnvironmentContributingAction() throws Exception { // create parameterized project with environment value in branch specification FreeStyleProject project = createFreeStyleProject(); GitSCM scm = new GitSCM( createRemoteRepositories(), Collections.singletonList(new BranchSpec("${MY_BRANCH}")), null, null, Collections.<GitSCMExtension>emptyList()); project.setScm(scm); // Initial commit and build commit("toto/commitFile1", johnDoe, "Commit number 1"); String brokenPath = "\\broken/path\\of/doom"; if (!sampleRepo.gitVersionAtLeast(1, 8)) { /* Git 1.7.10.4 fails the first build unless the git-upload-pack * program is available in its PATH. * Later versions of git don't have that problem. */ final String systemPath = System.getenv("PATH"); brokenPath = systemPath + File.pathSeparator + brokenPath; } final StringParameterValue real_param = new StringParameterValue("MY_BRANCH", "master"); final StringParameterValue fake_param = new StringParameterValue("PATH", brokenPath); final Action[] actions = {new ParametersAction(real_param), new FakeParametersAction(fake_param)}; // SECURITY-170 - have to use ParametersDefinitionProperty project.addProperty(new ParametersDefinitionProperty(new StringParameterDefinition("MY_BRANCH", "master"))); FreeStyleBuild first_build = project.scheduleBuild2(0, new Cause.UserIdCause(), actions).get(); rule.assertBuildStatus(Result.SUCCESS, first_build); Launcher launcher = workspace.createLauncher(listener); final EnvVars environment = GitUtils.getPollEnvironment(project, workspace, launcher, listener); assertEquals(environment.get("MY_BRANCH"), "master"); assertNotSame("Environment path should not be broken path", environment.get("PATH"), brokenPath); } /** * Method performs HTTP get on "notifyCommit" URL, passing it commit by SHA1 * and tests for custom SCM name build data consistency. * @param project project to build * @param commit commit to build * @param expectedScmName Expected SCM name for commit. * @param ordinal number of commit to log into errors, if any * @param git git SCM * @throws Exception on error */ private int notifyAndCheckScmName(FreeStyleProject project, ObjectId commit, String expectedScmName, int ordinal, GitSCM git, ObjectId... 
priorCommits) throws Exception { String priorCommitIDs = ""; for (ObjectId priorCommit : priorCommits) { priorCommitIDs = priorCommitIDs + " " + priorCommit; } assertTrue("scm polling should detect commit " + ordinal, notifyCommit(project, commit)); final Build build = project.getLastBuild(); final BuildData buildData = git.getBuildData(build); assertEquals("Expected SHA1 != built SHA1 for commit " + ordinal + " priors:" + priorCommitIDs, commit, buildData .getLastBuiltRevision().getSha1()); assertEquals("Expected SHA1 != retrieved SHA1 for commit " + ordinal + " priors:" + priorCommitIDs, commit, buildData.getLastBuild(commit).getSHA1()); assertTrue("Commit " + ordinal + " not marked as built", buildData.hasBeenBuilt(commit)); assertEquals("Wrong SCM Name for commit " + ordinal, expectedScmName, buildData.getScmName()); return build.getNumber(); } private void checkNumberedBuildScmName(FreeStyleProject project, int buildNumber, String expectedScmName, GitSCM git) throws Exception { final BuildData buildData = git.getBuildData(project.getBuildByNumber(buildNumber)); assertEquals("Wrong SCM Name", expectedScmName, buildData.getScmName()); } /* * Tests that builds have the correctly specified branches, associated with * the commit id, passed with "notifyCommit" URL. */ @Ignore("Intermittent failures on stable-3.10 branch, not on stable-3.9 or master") @Issue("JENKINS-24133") // Flaky test distracting from primary focus // @Test public void testSha1NotificationBranches() throws Exception { final String branchName = "master"; final FreeStyleProject project = setupProject(branchName, false); project.addTrigger(new SCMTrigger("")); final GitSCM git = (GitSCM) project.getScm(); setupJGit(git); final String commitFile1 = "commitFile1"; commit(commitFile1, johnDoe, "Commit number 1"); assertTrue("scm polling should detect commit 1", project.poll(listener).hasChanges()); build(project, Result.SUCCESS, commitFile1); final ObjectId commit1 = testRepo.git.revListAll().get(0); notifyAndCheckBranch(project, commit1, branchName, 1, git); commit("commitFile2", johnDoe, "Commit number 2"); assertTrue("scm polling should detect commit 2", project.poll(listener).hasChanges()); final ObjectId commit2 = testRepo.git.revListAll().get(0); notifyAndCheckBranch(project, commit2, branchName, 2, git); notifyAndCheckBranch(project, commit1, branchName, 1, git); } /* A null pointer exception was detected because the plugin failed to * write a branch name to the build data, so there was a SHA1 recorded * in the build data, but no branch name. 
     */
    @Test
    @Deprecated // Testing deprecated buildEnvVars
    public void testNoNullPointerExceptionWithNullBranch() throws Exception {
        ObjectId sha1 = ObjectId.fromString("2cec153f34767f7638378735dc2b907ed251a67d");
        /* This is the null that causes NPE */
        Branch branch = new Branch(null, sha1);
        List<Branch> branchList = new ArrayList<>();
        branchList.add(branch);
        Revision revision = new Revision(sha1, branchList);
        /* BuildData mock that will use the Revision with null branch name */
        BuildData buildData = Mockito.mock(BuildData.class);
        Mockito.when(buildData.getLastBuiltRevision()).thenReturn(revision);
        Mockito.when(buildData.hasBeenReferenced(anyString())).thenReturn(true);
        /* List of build data that will be returned by the mocked BuildData */
        List<BuildData> buildDataList = new ArrayList<>();
        buildDataList.add(buildData);
        /* AbstractBuild mock which returns the buildDataList that contains a null branch name */
        AbstractBuild build = Mockito.mock(AbstractBuild.class);
        Mockito.when(build.getActions(BuildData.class)).thenReturn(buildDataList);
        final FreeStyleProject project = setupProject("*/*", false);
        GitSCM scm = (GitSCM) project.getScm();
        scm.buildEnvVars(build, new EnvVars()); // NPE here before fix applied
        /* Verify mocks were called as expected */
        verify(buildData, times(1)).getLastBuiltRevision();
        verify(buildData, times(1)).hasBeenReferenced(anyString());
        verify(build, times(1)).getActions(BuildData.class);
    }

    @Test
    @Deprecated // Testing deprecated buildEnvVars
    public void testBuildEnvVarsLocalBranchStarStar() throws Exception {
        ObjectId sha1 = ObjectId.fromString("2cec153f34767f7638378735dc2b907ed251a67d");
        /* This is the null that causes NPE */
        Branch branch = new Branch("origin/master", sha1);
        List<Branch> branchList = new ArrayList<>();
        branchList.add(branch);
        Revision revision = new Revision(sha1, branchList);
        /* BuildData mock that will use the Revision with null branch name */
        BuildData buildData = Mockito.mock(BuildData.class);
        Mockito.when(buildData.getLastBuiltRevision()).thenReturn(revision);
        Mockito.when(buildData.hasBeenReferenced(anyString())).thenReturn(true);
        /* List of build data that will be returned by the mocked BuildData */
        List<BuildData> buildDataList = new ArrayList<>();
        buildDataList.add(buildData);
        /* AbstractBuild mock which returns the buildDataList that contains a null branch name */
        AbstractBuild build = Mockito.mock(AbstractBuild.class);
        Mockito.when(build.getActions(BuildData.class)).thenReturn(buildDataList);
        final FreeStyleProject project = setupProject("*/*", false);
        GitSCM scm = (GitSCM) project.getScm();
        scm.getExtensions().add(new LocalBranch("**"));
        EnvVars env = new EnvVars();
        scm.buildEnvVars(build, env); // NPE here before fix applied
        assertEquals("GIT_BRANCH", "origin/master", env.get("GIT_BRANCH"));
        assertEquals("GIT_LOCAL_BRANCH", "master", env.get("GIT_LOCAL_BRANCH"));
        /* Verify mocks were called as expected */
        verify(buildData, times(1)).getLastBuiltRevision();
        verify(buildData, times(1)).hasBeenReferenced(anyString());
        verify(build, times(1)).getActions(BuildData.class);
    }

    @Test
    @Deprecated // Testing deprecated buildEnvVars
    public void testBuildEnvVarsLocalBranchNull() throws Exception {
        ObjectId sha1 = ObjectId.fromString("2cec153f34767f7638378735dc2b907ed251a67d");
        /* This is the null that causes NPE */
        Branch branch = new Branch("origin/master", sha1);
        List<Branch> branchList = new ArrayList<>();
        branchList.add(branch);
        Revision revision = new Revision(sha1, branchList);
        /* BuildData mock that will use the Revision with null branch name */
BuildData buildData = Mockito.mock(BuildData.class); Mockito.when(buildData.getLastBuiltRevision()).thenReturn(revision); Mockito.when(buildData.hasBeenReferenced(anyString())).thenReturn(true); /* List of build data that will be returned by the mocked BuildData */ List<BuildData> buildDataList = new ArrayList<>(); buildDataList.add(buildData); /* AbstractBuild mock which returns the buildDataList that contains a null branch name */ AbstractBuild build = Mockito.mock(AbstractBuild.class); Mockito.when(build.getActions(BuildData.class)).thenReturn(buildDataList); final FreeStyleProject project = setupProject("*/*", false); GitSCM scm = (GitSCM) project.getScm(); scm.getExtensions().add(new LocalBranch("")); EnvVars env = new EnvVars(); scm.buildEnvVars(build, env); // NPE here before fix applied assertEquals("GIT_BRANCH", "origin/master", env.get("GIT_BRANCH")); assertEquals("GIT_LOCAL_BRANCH", "master", env.get("GIT_LOCAL_BRANCH")); /* Verify mocks were called as expected */ verify(buildData, times(1)).getLastBuiltRevision(); verify(buildData, times(1)).hasBeenReferenced(anyString()); verify(build, times(1)).getActions(BuildData.class); } @Test @Deprecated // testing deprecated buildEnvVars public void testBuildEnvVarsLocalBranchNotSet() throws Exception { ObjectId sha1 = ObjectId.fromString("2cec153f34767f7638378735dc2b907ed251a67d"); /* This is the null that causes NPE */ Branch branch = new Branch("origin/master", sha1); List<Branch> branchList = new ArrayList<>(); branchList.add(branch); Revision revision = new Revision(sha1, branchList); /* BuildData mock that will use the Revision with null branch name */ BuildData buildData = Mockito.mock(BuildData.class); Mockito.when(buildData.getLastBuiltRevision()).thenReturn(revision); Mockito.when(buildData.hasBeenReferenced(anyString())).thenReturn(true); /* List of build data that will be returned by the mocked BuildData */ List<BuildData> buildDataList = new ArrayList<>(); buildDataList.add(buildData); /* AbstractBuild mock which returns the buildDataList that contains a null branch name */ AbstractBuild build = Mockito.mock(AbstractBuild.class); Mockito.when(build.getActions(BuildData.class)).thenReturn(buildDataList); final FreeStyleProject project = setupProject("*/*", false); GitSCM scm = (GitSCM) project.getScm(); EnvVars env = new EnvVars(); scm.buildEnvVars(build, env); // NPE here before fix applied assertEquals("GIT_BRANCH", "origin/master", env.get("GIT_BRANCH")); assertEquals("GIT_LOCAL_BRANCH", null, env.get("GIT_LOCAL_BRANCH")); /* Verify mocks were called as expected */ verify(buildData, times(1)).getLastBuiltRevision(); verify(buildData, times(1)).hasBeenReferenced(anyString()); verify(build, times(1)).getActions(BuildData.class); } @Test public void testBuildEnvironmentVariablesSingleRemote() throws Exception { ObjectId sha1 = ObjectId.fromString("2cec153f34767f7638378735dc2b907ed251a67d"); List<Branch> branchList = new ArrayList<>(); Branch branch = new Branch("origin/master", sha1); branchList.add(branch); Revision revision = new Revision(sha1, branchList); /* BuildData mock that will use the Revision */ BuildData buildData = Mockito.mock(BuildData.class); Mockito.when(buildData.getLastBuiltRevision()).thenReturn(revision); Mockito.when(buildData.hasBeenReferenced(anyString())).thenReturn(true); /* List of build data that will be returned by the mocked BuildData */ List<BuildData> buildDataList = new ArrayList<>(); buildDataList.add(buildData); /* Run mock which returns the buildDataList */ Run<?, ?> build = 
Mockito.mock(Run.class); Mockito.when(build.getActions(BuildData.class)).thenReturn(buildDataList); FreeStyleProject project = setupSimpleProject("*/*"); GitSCM scm = (GitSCM) project.getScm(); Map<String, String> env = new HashMap<>(); scm.buildEnvironment(build, env); assertEquals("GIT_BRANCH is invalid", "origin/master", env.get("GIT_BRANCH")); assertEquals("GIT_LOCAL_BRANCH is invalid", null, env.get("GIT_LOCAL_BRANCH")); assertEquals("GIT_COMMIT is invalid", sha1.getName(), env.get("GIT_COMMIT")); assertEquals("GIT_URL is invalid", testRepo.gitDir.getAbsolutePath(), env.get("GIT_URL")); assertNull("GIT_URL_1 should not have been set", env.get("GIT_URL_1")); } @Test public void testBuildEnvironmentVariablesMultipleRemotes() throws Exception { ObjectId sha1 = ObjectId.fromString("2cec153f34767f7638378735dc2b907ed251a67d"); List<Branch> branchList = new ArrayList<>(); Branch branch = new Branch("origin/master", sha1); branchList.add(branch); Revision revision = new Revision(sha1, branchList); /* BuildData mock that will use the Revision */ BuildData buildData = Mockito.mock(BuildData.class); Mockito.when(buildData.getLastBuiltRevision()).thenReturn(revision); Mockito.when(buildData.hasBeenReferenced(anyString())).thenReturn(true); /* List of build data that will be returned by the mocked BuildData */ List<BuildData> buildDataList = new ArrayList<>(); buildDataList.add(buildData); /* Run mock which returns the buildDataList */ Run<?, ?> build = Mockito.mock(Run.class); Mockito.when(build.getActions(BuildData.class)).thenReturn(buildDataList); FreeStyleProject project = setupSimpleProject("*/*"); /* Update project so we have two remote configs */ List<UserRemoteConfig> userRemoteConfigs = new ArrayList<>(); userRemoteConfigs.add(new UserRemoteConfig(testRepo.gitDir.getAbsolutePath(), "origin", "", null)); final String upstreamRepoUrl = "/upstream/url"; userRemoteConfigs.add(new UserRemoteConfig(upstreamRepoUrl, "upstream", "", null)); GitSCM scm = new GitSCM( userRemoteConfigs, Collections.singletonList(new BranchSpec(branch.getName())), null, null, Collections.<GitSCMExtension>emptyList()); project.setScm(scm); Map<String, String> env = new HashMap<>(); scm.buildEnvironment(build, env); assertEquals("GIT_BRANCH is invalid", "origin/master", env.get("GIT_BRANCH")); assertEquals("GIT_LOCAL_BRANCH is invalid", null, env.get("GIT_LOCAL_BRANCH")); assertEquals("GIT_COMMIT is invalid", sha1.getName(), env.get("GIT_COMMIT")); assertEquals("GIT_URL is invalid", testRepo.gitDir.getAbsolutePath(), env.get("GIT_URL")); assertEquals("GIT_URL_1 is invalid", testRepo.gitDir.getAbsolutePath(), env.get("GIT_URL_1")); assertEquals("GIT_URL_2 is invalid", upstreamRepoUrl, env.get("GIT_URL_2")); assertNull("GIT_URL_3 should not have been set", env.get("GIT_URL_3")); } @Issue("JENKINS-38241") @Test public void testCommitMessageIsPrintedToLogs() throws Exception { sampleRepo.init(); sampleRepo.write("file", "v1"); sampleRepo.git("commit", "--all", "--message=test commit"); FreeStyleProject p = setupSimpleProject("master"); Run<?,?> run = rule.buildAndAssertSuccess(p); rule.waitForMessage("Commit message: \"test commit\"", run); } /** * Method performs HTTP get on "notifyCommit" URL, passing it commit by SHA1 * and tests for build data consistency. 
* @param project project to build * @param commit commit to build * @param expectedBranch branch, that is expected to be built * @param ordinal number of commit to log into errors, if any * @param git git SCM * @throws Exception on error */ private void notifyAndCheckBranch(FreeStyleProject project, ObjectId commit, String expectedBranch, int ordinal, GitSCM git) throws Exception { assertTrue("scm polling should detect commit " + ordinal, notifyCommit(project, commit)); final BuildData buildData = git.getBuildData(project.getLastBuild()); final Collection<Branch> builtBranches = buildData.lastBuild.getRevision().getBranches(); assertEquals("Commit " + ordinal + " should be built", commit, buildData .getLastBuiltRevision().getSha1()); final String expectedBranchString = "origin/" + expectedBranch; assertFalse("Branches should be detected for the build", builtBranches.isEmpty()); assertEquals(expectedBranch + " branch should be detected", expectedBranchString, builtBranches.iterator().next().getName()); assertEquals(expectedBranchString, getEnvVars(project).get(GitSCM.GIT_BRANCH)); } /** * Method performs commit notification for the last committed SHA1 using * notifyCommit URL. * @param project project to trigger * @return whether the new build has been triggered (<code>true</code>) or * not (<code>false</code>). * @throws Exception on error */ private boolean notifyCommit(FreeStyleProject project, ObjectId commitId) throws Exception { final int initialBuildNumber = project.getLastBuild().getNumber(); final String commit1 = ObjectId.toString(commitId); final String notificationPath = rule.getURL().toExternalForm() + "git/notifyCommit?url=" + testRepo.gitDir.toString() + "&sha1=" + commit1; final URL notifyUrl = new URL(notificationPath); String notifyContent = null; try (final InputStream is = notifyUrl.openStream()) { notifyContent = IOUtils.toString(is, "UTF-8"); } assertThat(notifyContent, containsString("No Git consumers using SCM API plugin for: " + testRepo.gitDir.toString())); if ((project.getLastBuild().getNumber() == initialBuildNumber) && (rule.jenkins.getQueue().isEmpty())) { return false; } else { while (!rule.jenkins.getQueue().isEmpty()) { Thread.sleep(100); } final FreeStyleBuild build = project.getLastBuild(); while (build.isBuilding()) { Thread.sleep(100); } return true; } } private void setupJGit(GitSCM git) { git.gitTool="jgit"; rule.jenkins.getDescriptorByType(GitTool.DescriptorImpl.class).setInstallations(new JGitTool(Collections.<ToolProperty<?>>emptyList())); } /** We clean the environment, just in case the test is being run from a Jenkins job using this same plugin :). 
     */
    @TestExtension
    public static class CleanEnvironment extends EnvironmentContributor {
        @Override
        public void buildEnvironmentFor(Run run, EnvVars envs, TaskListener listener) {
            envs.remove(GitSCM.GIT_BRANCH);
            envs.remove(GitSCM.GIT_LOCAL_BRANCH);
            envs.remove(GitSCM.GIT_COMMIT);
            envs.remove(GitSCM.GIT_PREVIOUS_COMMIT);
            envs.remove(GitSCM.GIT_PREVIOUS_SUCCESSFUL_COMMIT);
        }
    }

    /** Returns true if test cleanup is not reliable */
    private boolean cleanupIsUnreliable() {
        // Windows cleanup is unreliable on ci.jenkins.io
        String jobUrl = System.getenv("JOB_URL");
        return isWindows() && jobUrl != null && jobUrl.contains("ci.jenkins.io");
    }

    /** inline ${@link hudson.Functions#isWindows()} to prevent a transient remote classloader issue */
    private boolean isWindows() {
        return java.io.File.pathSeparatorChar == ';';
    }

    private StandardCredentials createCredential(CredentialsScope scope, String id) {
        return new UsernamePasswordCredentialsImpl(scope, id, "desc: " + id, "username", "password");
    }
}
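/*
 * A minimal sketch (not part of the test class above) of the "/git/notifyCommit" call that the
 * notifyCommit helper performs: an HTTP GET against the Jenkins root URL, passing the repository
 * location and the commit SHA1. The names jenkinsRootUrl, repoPath and sha1 are placeholders,
 * assuming the same values the tests obtain from rule.getURL() and testRepo:
 *
 *     URL notify = new URL(jenkinsRootUrl + "git/notifyCommit?url=" + repoPath + "&sha1=" + sha1);
 *     try (InputStream in = notify.openStream()) {
 *         String response = IOUtils.toString(in, "UTF-8"); // lists the projects that were notified
 *     }
 */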
package org.cojen.tupl;

import java.io.IOException;

import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;

import org.junit.*;
import static org.junit.Assert.*;

import org.cojen.tupl.ext.RecoveryHandler;

import static org.cojen.tupl.TestUtils.*;

/**
 * Tests for the Transaction.prepare method.
 *
 * @author Brian S O'Neill
 */
public class TxnPrepareTest {
    public static void main(String[] args) throws Exception {
        org.junit.runner.JUnitCore.main(TxnPrepareTest.class.getName());
    }

    @After
    public void teardown() throws Exception {
        deleteTempDatabases(getClass());
    }

    protected DatabaseConfig newConfig(RecoveryHandler handler) {
        return new DatabaseConfig()
            .recoveryHandler(handler)
            .directPageAccess(false)
            .lockTimeout(5000, TimeUnit.MILLISECONDS)
            .checkpointRate(-1, null);
    }

    protected Database newTempDatabase(DatabaseConfig config) throws Exception {
        return TestUtils.newTempDatabase(getClass(), config);
    }

    @Test
    public void noHandler() throws Exception {
        Database db = newTempDatabase(newConfig(null));
        Transaction txn = db.newTransaction();
        try {
            txn.prepare();
            fail();
        } catch (IllegalStateException e) {
        }
    }

    @Test
    public void noRedo() throws Exception {
        try {
            Transaction.BOGUS.prepare();
            fail();
        } catch (IllegalStateException e) {
        }

        RecoveryHandler handler = new RecoveryHandler() {
            public void init(Database db) {}
            public void recover(Transaction txn) {}
        };

        Database db = newTempDatabase(newConfig(handler));
        Transaction txn = db.newTransaction();
        txn.durabilityMode(DurabilityMode.NO_REDO);
        try {
            txn.prepare();
            fail();
        } catch (IllegalStateException e) {
        }
    }

    // Test transaction recovery from just the redo log...
    @Test public void basicRedoRecoveryNoAction() throws Exception { basicRecovery("redo", "none"); }
    @Test public void basicRedoRecoveryReset() throws Exception { basicRecovery("redo", "reset"); }
    @Test public void basicRedoRecoveryModifyReset() throws Exception { basicRecovery("redo", "modify-reset"); }
    @Test public void basicRedoRecoveryCommit() throws Exception { basicRecovery("redo", "commit"); }
    @Test public void basicRedoRecoveryModifyCommit() throws Exception { basicRecovery("redo", "modify-commit"); }
    @Test public void basicRedoRecoverySticky() throws Exception { basicRecovery("redo", "sticky"); }

    // Test transaction recovery from just the undo log...
    @Test public void basicUndoRecoveryNoAction() throws Exception { basicRecovery("undo", "none"); }
    @Test public void basicUndoRecoveryReset() throws Exception { basicRecovery("undo", "reset"); }
    @Test public void basicUndoRecoveryModifyReset() throws Exception { basicRecovery("undo", "modify-reset"); }
    @Test public void basicUndoRecoveryCommit() throws Exception { basicRecovery("undo", "commit"); }
    @Test public void basicUndoRecoveryModifyCommit() throws Exception { basicRecovery("undo", "modify-commit"); }
    @Test public void basicUndoRecoverySticky() throws Exception { basicRecovery("undo", "sticky"); }

    // Test transaction recovery from the redo and undo logs...
    @Test public void basicRedoUndoRecoveryNoAction() throws Exception { basicRecovery("redo-undo", "none"); }
    @Test public void basicRedoUndoRecoveryReset() throws Exception { basicRecovery("redo-undo", "reset"); }
    @Test public void basicRedoUndoRecoveryModifyReset() throws Exception { basicRecovery("redo-undo", "modify-reset"); }
    @Test public void basicRedoUndoRecoveryCommit() throws Exception { basicRecovery("redo-undo", "commit"); }
    @Test public void basicRedoUndoRecoveryModifyCommit() throws Exception { basicRecovery("redo-undo", "modify-commit"); }
    @Test public void basicRedoUndoRecoverySticky() throws Exception { basicRecovery("redo-undo", "sticky"); }

    private void basicRecovery(String recoveryType, String recoveryAction) throws Exception {
        byte[] key1 = "key-1".getBytes();
        byte[] key2 = "key-2".getBytes();

        class Recovered {
            final long mTxnId;
            final Transaction mTxn;

            Recovered(Transaction txn) {
                // Capture the transaction id before the transaction is reset.
                mTxnId = txn.getId();
                mTxn = txn;
            }
        }

        BlockingQueue<Recovered> recoveredQueue = new LinkedBlockingQueue<>();

        RecoveryHandler handler = new RecoveryHandler() {
            private Database db;

            @Override
            public void init(Database db) {
                this.db = db;
            }

            @Override
            public void recover(Transaction txn) throws IOException {
                recoveredQueue.add(new Recovered(txn));

                switch (recoveryAction) {
                default:
                    // Leak the transaction and keep the locks.
                    break;
                case "modify-reset":
                    db.findIndex("test1").store(txn, key1, "modified-1".getBytes());
                    db.findIndex("test2").store(txn, key2, "modified-2".getBytes());
                    // Fallthrough to the next case to reset.
                case "reset":
                    txn.reset();
                    break;
                case "modify-commit":
                    db.findIndex("test1").store(txn, key1, "modified-1".getBytes());
                    db.findIndex("test2").store(txn, key2, "modified-2".getBytes());
                    // Fallthrough to the next case to commit.
                case "commit":
                    txn.commit();
                    break;
                }
            }
        };

        DatabaseConfig config = newConfig(handler);
        Database db = newTempDatabase(config);

        long txnId;
        {
            Index ix1 = db.openIndex("test1");
            Index ix2 = db.openIndex("test2");
            ix1.store(null, key1, "v1".getBytes());
            ix2.store(null, key2, "v2".getBytes());

            Transaction txn = db.newTransaction();
            ix1.store(txn, key1, "value-1".getBytes());
            if ("redo-undo".equals(recoveryType)) {
                db.checkpoint();
                // Suppress later assertion.
                recoveryType = "redo";
            }
            ix2.store(txn, key2, "value-2".getBytes());

            if ("undo".equals(recoveryType)) {
                db.checkpoint();
            } else if (!"redo".equals(recoveryType)) {
                fail("Unknown recovery type: " + recoveryType);
            }

            txnId = txn.getId();
            txn.prepare();
        }

        for (int i=0; i<3; i++) {
            db = reopenTempDatabase(getClass(), db, config);

            Recovered recovered = recoveredQueue.take();
            assertEquals(txnId, recovered.mTxnId);
            assertTrue(recoveredQueue.isEmpty());

            Index ix1 = db.openIndex("test1");
            Index ix2 = db.openIndex("test2");

            switch (recoveryAction) {
            default:
                fail("Unknown recovery action: " + recoveryAction);
                break;
            case "none": case "sticky":
                // Locks are retained.
                try {
                    ix1.load(null, key1);
                    fail();
                } catch (LockTimeoutException e) {
                    // Expected.
                }
                try {
                    ix2.load(null, key2);
                    fail();
                } catch (LockTimeoutException e) {
                    // Expected.
                }
                if ("sticky".equals(recoveryAction)) {
                    break;
                }
                recovered.mTxn.reset();
                // Fallthrough to the next case and verify rollback.
            case "reset": case "modify-reset":
                // Everything was rolled back.
                fastAssertArrayEquals("v1".getBytes(), ix1.load(null, key1));
                fastAssertArrayEquals("v2".getBytes(), ix2.load(null, key2));
                break;
            case "modify-commit":
                // Everything was modified and committed.
                fastAssertArrayEquals("modified-1".getBytes(), ix1.load(null, key1));
                fastAssertArrayEquals("modified-2".getBytes(), ix2.load(null, key2));
                break;
            case "commit":
                // Everything was committed.
                fastAssertArrayEquals("value-1".getBytes(), ix1.load(null, key1));
                fastAssertArrayEquals("value-2".getBytes(), ix2.load(null, key2));
                break;
            }

            if (!"sticky".equals(recoveryAction)) {
                break;
            }

            // Transaction should stick around each time the database is reopened.
        }
    }

    @Test
    public void basicMix() throws Exception {
        // Test that unprepared transactions don't get passed to the recover handler, testing
        // also with multiple recovered transactions.

        BlockingQueue<Transaction> recovered = new LinkedBlockingQueue<>();
        RecoveryHandler handler = new RecoveryHandler() {
            @Override
            public void init(Database db) {
            }

            @Override
            public void recover(Transaction txn) {
                recovered.add(txn);
            }
        };

        DatabaseConfig config = newConfig(handler);
        Database db = newTempDatabase(config);
        Index ix = db.openIndex("test");

        // Should rollback and not be passed to the handler.
        Transaction txn1 = db.newTransaction();
        ix.store(txn1, "key-1".getBytes(), "value-1".getBytes());

        // Should be passed to the handler.
        Transaction txn2 = db.newTransaction();
        ix.store(txn2, "key-2".getBytes(), "value-2".getBytes());
        txn2.prepare();

        // Should be passed to the handler.
        Transaction txn3 = db.newTransaction();
        ix.store(txn3, "key-3".getBytes(), "value-3".getBytes());
        txn3.prepare();

        // Should rollback and not be passed to the handler.
        Transaction txn4 = db.newTransaction();
        ix.store(txn4, "key-4".getBytes(), "value-4".getBytes());

        // Should commit and not be passed to the handler.
        Transaction txn5 = db.newTransaction();
        ix.store(txn5, "key-5".getBytes(), "value-5".getBytes());
        txn5.prepare();
        txn5.commit();

        // Should rollback and not be passed to the handler.
        Transaction txn6 = db.newTransaction();
        ix.store(txn6, "key-6".getBytes(), "value-6".getBytes());
        txn6.prepare();
        txn6.exit();

        db = reopenTempDatabase(getClass(), db, config);
        ix = db.openIndex("test");

        Transaction t1 = recovered.take();
        Transaction t2 = recovered.take();
        assertTrue(recovered.isEmpty());

        // Transactions can be recovered in any order.
        if (t1.getId() == txn2.getId()) {
            assertEquals(t2.getId(), txn3.getId());
        } else {
            assertEquals(t1.getId(), txn3.getId());
            assertEquals(t2.getId(), txn2.getId());
        }

        // Rollback of txn1, txn4, and txn6.
        assertNull(ix.load(null, "key-1".getBytes()));
        assertNull(ix.load(null, "key-4".getBytes()));
        assertNull(ix.load(null, "key-6".getBytes()));

        // Commit of txn5.
        fastAssertArrayEquals("value-5".getBytes(), ix.load(null, "key-5".getBytes()));

        // Recovered transactions are still locked.
        try {
            ix.load(null, "key-2".getBytes());
            fail();
        } catch (LockTimeoutException e) {
        }
        try {
            ix.load(null, "key-3".getBytes());
            fail();
        } catch (LockTimeoutException e) {
        }
    }

    @Test
    public void reopenNoHandler() throws Exception {
        // When database is reopened without a recovery handler, the recovered transactions
        // aren't lost.

        RecoveryHandler handler = new RecoveryHandler() {
            @Override
            public void init(Database db) {
            }

            @Override
            public void recover(Transaction txn) throws IOException {
                txn.commit();
            }
        };

        DatabaseConfig config = newConfig(handler);
        Database db = newTempDatabase(config);
        Index ix = db.openIndex("test");

        Transaction txn = db.newTransaction();
        byte[] key = "hello".getBytes();
        ix.store(txn, key, "world".getBytes());
        txn.prepare();

        // Reopen without the handler.
        config.recoveryHandler(null);
        db = reopenTempDatabase(getClass(), db, config);

        // Still locked.
        ix = db.openIndex("test");
        txn = db.newTransaction();
        assertEquals(LockResult.TIMED_OUT_LOCK, ix.tryLockShared(txn, key, 0));
        txn.reset();

        // Reopen with the handler installed.
        config.recoveryHandler(handler);
        db = reopenTempDatabase(getClass(), db, config);

        // Verify that the handler has committed the recovered transaction.
        ix = db.openIndex("test");
        fastAssertArrayEquals("world".getBytes(), ix.load(null, key));
    }
}
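/*
 * A minimal sketch (not one of the tests above) of the two-phase prepare/recover workflow these
 * tests exercise. It assumes a Database opened with Database.open(config) rather than the
 * temporary-database helpers used by the test class; key and value are placeholder byte arrays:
 *
 *     RecoveryHandler handler = new RecoveryHandler() {
 *         public void init(Database db) {}
 *         public void recover(Transaction txn) throws IOException {
 *             // Called on reopen for each transaction that was prepared but never finished;
 *             // the handler decides whether to commit or reset it.
 *             txn.commit();
 *         }
 *     };
 *     DatabaseConfig config = new DatabaseConfig().recoveryHandler(handler);
 *     Database db = Database.open(config);
 *     Transaction txn = db.newTransaction();
 *     db.openIndex("test").store(txn, key, value);
 *     txn.prepare(); // durably hands the transaction off to the recovery handler on restart
 */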
package org.jsoup.select;

import org.jsoup.Jsoup;
import org.jsoup.MultiLocaleRule;
import org.jsoup.MultiLocaleRule.MultiLocaleTest;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.parser.Parser;
import org.junit.Rule;
import org.junit.Test;

import static org.junit.Assert.*;

/**
 * Tests that the selector selects correctly.
 *
 * @author Jonathan Hedley, jonathan@hedley.net
 */
public class SelectorTest {
    @Rule public MultiLocaleRule rule = new MultiLocaleRule();

    @Test public void testByTag() {
        // should be case insensitive
        Elements els = Jsoup.parse("<div id=1><div id=2><p>Hello</p></div></div><DIV id=3>").select("DIV");
        assertEquals(3, els.size());
        assertEquals("1", els.get(0).id());
        assertEquals("2", els.get(1).id());
        assertEquals("3", els.get(2).id());

        Elements none = Jsoup.parse("<div id=1><div id=2><p>Hello</p></div></div><div id=3>").select("span");
        assertEquals(0, none.size());
    }

    @Test public void testById() {
        Elements els = Jsoup.parse("<div><p id=foo>Hello</p><p id=foo>Foo two!</p></div>").select("#foo");
        assertEquals(2, els.size());
        assertEquals("Hello", els.get(0).text());
        assertEquals("Foo two!", els.get(1).text());

        Elements none = Jsoup.parse("<div id=1></div>").select("#foo");
        assertEquals(0, none.size());
    }

    @Test public void testByClass() {
        Elements els = Jsoup.parse("<p id=0 class='ONE two'><p id=1 class='one'><p id=2 class='two'>").select("P.One");
        assertEquals(2, els.size());
        assertEquals("0", els.get(0).id());
        assertEquals("1", els.get(1).id());

        Elements none = Jsoup.parse("<div class='one'></div>").select(".foo");
        assertEquals(0, none.size());

        Elements els2 = Jsoup.parse("<div class='One-Two'></div>").select(".one-two");
        assertEquals(1, els2.size());
    }

    @Test public void testByClassCaseInsensitive() {
        String html = "<p Class=foo>One <p Class=Foo>Two <p class=FOO>Three <p class=farp>Four";
        Elements elsFromClass = Jsoup.parse(html).select("P.Foo");
        Elements elsFromAttr = Jsoup.parse(html).select("p[class=foo]");
        assertEquals(elsFromAttr.size(), elsFromClass.size());
        assertEquals(3, elsFromClass.size());
        assertEquals("Two", elsFromClass.get(1).text());
    }

    @Test @MultiLocaleTest public void testByAttribute() {
        String h = "<div Title=Foo /><div Title=Bar /><div Style=Qux /><div title=Balim /><div title=SLIM />" +
                "<div data-name='with spaces'/>";
        Document doc = Jsoup.parse(h);

        Elements withTitle = doc.select("[title]");
        assertEquals(4, withTitle.size());

        Elements foo = doc.select("[TITLE=foo]");
        assertEquals(1, foo.size());

        Elements foo2 = doc.select("[title=\"foo\"]");
        assertEquals(1, foo2.size());

        Elements foo3 = doc.select("[title=\"Foo\"]");
        assertEquals(1, foo3.size());

        Elements dataName = doc.select("[data-name=\"with spaces\"]");
        assertEquals(1, dataName.size());
        assertEquals("with spaces", dataName.first().attr("data-name"));

        Elements not = doc.select("div[title!=bar]");
        assertEquals(5, not.size());
        assertEquals("Foo", not.first().attr("title"));

        Elements starts = doc.select("[title^=ba]");
        assertEquals(2, starts.size());
        assertEquals("Bar", starts.first().attr("title"));
        assertEquals("Balim", starts.last().attr("title"));

        Elements ends = doc.select("[title$=im]");
        assertEquals(2, ends.size());
        assertEquals("Balim", ends.first().attr("title"));
        assertEquals("SLIM", ends.last().attr("title"));

        Elements contains = doc.select("[title*=i]");
        assertEquals(2, contains.size());
        assertEquals("Balim", contains.first().attr("title"));
        assertEquals("SLIM", contains.last().attr("title"));
    }

    @Test public void testNamespacedTag() {
        Document doc =
Jsoup.parse("<div><abc:def id=1>Hello</abc:def></div> <abc:def class=bold id=2>There</abc:def>"); Elements byTag = doc.select("abc|def"); assertEquals(2, byTag.size()); assertEquals("1", byTag.first().id()); assertEquals("2", byTag.last().id()); Elements byAttr = doc.select(".bold"); assertEquals(1, byAttr.size()); assertEquals("2", byAttr.last().id()); Elements byTagAttr = doc.select("abc|def.bold"); assertEquals(1, byTagAttr.size()); assertEquals("2", byTagAttr.last().id()); Elements byContains = doc.select("abc|def:contains(e)"); assertEquals(2, byContains.size()); assertEquals("1", byContains.first().id()); assertEquals("2", byContains.last().id()); } @Test public void testWildcardNamespacedTag() { Document doc = Jsoup.parse("<div><abc:def id=1>Hello</abc:def></div> <abc:def class=bold id=2>There</abc:def>"); Elements byTag = doc.select("*|def"); assertEquals(2, byTag.size()); assertEquals("1", byTag.first().id()); assertEquals("2", byTag.last().id()); Elements byAttr = doc.select(".bold"); assertEquals(1, byAttr.size()); assertEquals("2", byAttr.last().id()); Elements byTagAttr = doc.select("*|def.bold"); assertEquals(1, byTagAttr.size()); assertEquals("2", byTagAttr.last().id()); Elements byContains = doc.select("*|def:contains(e)"); assertEquals(2, byContains.size()); assertEquals("1", byContains.first().id()); assertEquals("2", byContains.last().id()); } @Test public void testWildcardNamespacedXmlTag() { Document doc = Jsoup.parse( "<div><Abc:Def id=1>Hello</Abc:Def></div> <Abc:Def class=bold id=2>There</abc:def>", "", Parser.xmlParser() ); Elements byTag = doc.select("*|Def"); assertEquals(2, byTag.size()); assertEquals("1", byTag.first().id()); assertEquals("2", byTag.last().id()); Elements byAttr = doc.select(".bold"); assertEquals(1, byAttr.size()); assertEquals("2", byAttr.last().id()); Elements byTagAttr = doc.select("*|Def.bold"); assertEquals(1, byTagAttr.size()); assertEquals("2", byTagAttr.last().id()); Elements byContains = doc.select("*|Def:contains(e)"); assertEquals(2, byContains.size()); assertEquals("1", byContains.first().id()); assertEquals("2", byContains.last().id()); } @Test public void testWildCardNamespacedCaseVariations() { Document doc = Jsoup.parse("<One:Two>One</One:Two><three:four>Two</three:four>", "", Parser.xmlParser()); Elements els1 = doc.select("One|Two"); Elements els2 = doc.select("one|two"); Elements els3 = doc.select("Three|Four"); Elements els4 = doc.select("three|Four"); assertEquals(els1, els2); assertEquals(els3, els4); assertEquals("One", els1.text()); assertEquals(1, els1.size()); assertEquals("Two", els3.text()); assertEquals(1, els2.size()); } @Test @MultiLocaleTest public void testByAttributeStarting() { Document doc = Jsoup.parse("<div id=1 ATTRIBUTE data-name=jsoup>Hello</div><p data-val=5 id=2>There</p><p id=3>No</p>"); Elements withData = doc.select("[^data-]"); assertEquals(2, withData.size()); assertEquals("1", withData.first().id()); assertEquals("2", withData.last().id()); withData = doc.select("p[^data-]"); assertEquals(1, withData.size()); assertEquals("2", withData.first().id()); assertEquals(1, doc.select("[^attrib]").size()); } @Test public void testByAttributeRegex() { Document doc = Jsoup.parse("<p><img src=foo.png id=1><img src=bar.jpg id=2><img src=qux.JPEG id=3><img src=old.gif><img></p>"); Elements imgs = doc.select("img[src~=(?i)\\.(png|jpe?g)]"); assertEquals(3, imgs.size()); assertEquals("1", imgs.get(0).id()); assertEquals("2", imgs.get(1).id()); assertEquals("3", imgs.get(2).id()); } @Test public void 
testByAttributeRegexCharacterClass() { Document doc = Jsoup.parse("<p><img src=foo.png id=1><img src=bar.jpg id=2><img src=qux.JPEG id=3><img src=old.gif id=4></p>"); Elements imgs = doc.select("img[src~=[o]]"); assertEquals(2, imgs.size()); assertEquals("1", imgs.get(0).id()); assertEquals("4", imgs.get(1).id()); } @Test public void testByAttributeRegexCombined() { Document doc = Jsoup.parse("<div><table class=x><td>Hello</td></table></div>"); Elements els = doc.select("div table[class~=x|y]"); assertEquals(1, els.size()); assertEquals("Hello", els.text()); } @Test public void testCombinedWithContains() { Document doc = Jsoup.parse("<p id=1>One</p><p>Two +</p><p>Three +</p>"); Elements els = doc.select("p#1 + :contains(+)"); assertEquals(1, els.size()); assertEquals("Two +", els.text()); assertEquals("p", els.first().tagName()); } @Test public void testAllElements() { String h = "<div><p>Hello</p><p><b>there</b></p></div>"; Document doc = Jsoup.parse(h); Elements allDoc = doc.select("*"); Elements allUnderDiv = doc.select("div *"); assertEquals(8, allDoc.size()); assertEquals(3, allUnderDiv.size()); assertEquals("p", allUnderDiv.first().tagName()); } @Test public void testAllWithClass() { String h = "<p class=first>One<p class=first>Two<p>Three"; Document doc = Jsoup.parse(h); Elements ps = doc.select("*.first"); assertEquals(2, ps.size()); } @Test public void testGroupOr() { String h = "<div title=foo /><div title=bar /><div /><p></p><img /><span title=qux>"; Document doc = Jsoup.parse(h); Elements els = doc.select("p,div,[title]"); assertEquals(5, els.size()); assertEquals("div", els.get(0).tagName()); assertEquals("foo", els.get(0).attr("title")); assertEquals("div", els.get(1).tagName()); assertEquals("bar", els.get(1).attr("title")); assertEquals("div", els.get(2).tagName()); assertEquals(0, els.get(2).attr("title").length()); // missing attributes come back as empty string assertFalse(els.get(2).hasAttr("title")); assertEquals("p", els.get(3).tagName()); assertEquals("span", els.get(4).tagName()); } @Test public void testGroupOrAttribute() { String h = "<div id=1 /><div id=2 /><div title=foo /><div title=bar />"; Elements els = Jsoup.parse(h).select("[id],[title=foo]"); assertEquals(3, els.size()); assertEquals("1", els.get(0).id()); assertEquals("2", els.get(1).id()); assertEquals("foo", els.get(2).attr("title")); } @Test public void descendant() { String h = "<div class=head><p class=first>Hello</p><p>There</p></div><p>None</p>"; Document doc = Jsoup.parse(h); Element root = doc.getElementsByClass("HEAD").first(); Elements els = root.select(".head p"); assertEquals(2, els.size()); assertEquals("Hello", els.get(0).text()); assertEquals("There", els.get(1).text()); Elements p = root.select("p.first"); assertEquals(1, p.size()); assertEquals("Hello", p.get(0).text()); Elements empty = root.select("p .first"); // self, not descend, should not match assertEquals(0, empty.size()); Elements aboveRoot = root.select("body div.head"); assertEquals(0, aboveRoot.size()); } @Test public void and() { String h = "<div id=1 class='foo bar' title=bar name=qux><p class=foo title=bar>Hello</p></div"; Document doc = Jsoup.parse(h); Elements div = doc.select("div.foo"); assertEquals(1, div.size()); assertEquals("div", div.first().tagName()); Elements p = doc.select("div .foo"); // space indicates like "div *.foo" assertEquals(1, p.size()); assertEquals("p", p.first().tagName()); Elements div2 = doc.select("div#1.foo.bar[title=bar][name=qux]"); // very specific! 
assertEquals(1, div2.size()); assertEquals("div", div2.first().tagName()); Elements p2 = doc.select("div *.foo"); // space indicates like "div *.foo" assertEquals(1, p2.size()); assertEquals("p", p2.first().tagName()); } @Test public void deeperDescendant() { String h = "<div class=head><p><span class=first>Hello</div><div class=head><p class=first><span>Another</span><p>Again</div>"; Document doc = Jsoup.parse(h); Element root = doc.getElementsByClass("head").first(); Elements els = root.select("div p .first"); assertEquals(1, els.size()); assertEquals("Hello", els.first().text()); assertEquals("span", els.first().tagName()); Elements aboveRoot = root.select("body p .first"); assertEquals(0, aboveRoot.size()); } @Test public void parentChildElement() { String h = "<div id=1><div id=2><div id = 3></div></div></div><div id=4></div>"; Document doc = Jsoup.parse(h); Elements divs = doc.select("div > div"); assertEquals(2, divs.size()); assertEquals("2", divs.get(0).id()); // 2 is child of 1 assertEquals("3", divs.get(1).id()); // 3 is child of 2 Elements div2 = doc.select("div#1 > div"); assertEquals(1, div2.size()); assertEquals("2", div2.get(0).id()); } @Test public void parentWithClassChild() { String h = "<h1 class=foo><a href=1 /></h1><h1 class=foo><a href=2 class=bar /></h1><h1><a href=3 /></h1>"; Document doc = Jsoup.parse(h); Elements allAs = doc.select("h1 > a"); assertEquals(3, allAs.size()); assertEquals("a", allAs.first().tagName()); Elements fooAs = doc.select("h1.foo > a"); assertEquals(2, fooAs.size()); assertEquals("a", fooAs.first().tagName()); Elements barAs = doc.select("h1.foo > a.bar"); assertEquals(1, barAs.size()); } @Test public void parentChildStar() { String h = "<div id=1><p>Hello<p><b>there</b></p></div><div id=2><span>Hi</span></div>"; Document doc = Jsoup.parse(h); Elements divChilds = doc.select("div > *"); assertEquals(3, divChilds.size()); assertEquals("p", divChilds.get(0).tagName()); assertEquals("p", divChilds.get(1).tagName()); assertEquals("span", divChilds.get(2).tagName()); } @Test public void multiChildDescent() { String h = "<div id=foo><h1 class=bar><a href=http://example.com/>One</a></h1></div>"; Document doc = Jsoup.parse(h); Elements els = doc.select("div#foo > h1.bar > a[href*=example]"); assertEquals(1, els.size()); assertEquals("a", els.first().tagName()); } @Test public void caseInsensitive() { String h = "<dIv tItle=bAr><div>"; // mixed case so a simple toLowerCase() on value doesn't catch Document doc = Jsoup.parse(h); assertEquals(2, doc.select("DiV").size()); assertEquals(1, doc.select("DiV[TiTLE]").size()); assertEquals(1, doc.select("DiV[TiTLE=BAR]").size()); assertEquals(0, doc.select("DiV[TiTLE=BARBARELLA]").size()); } @Test public void adjacentSiblings() { String h = "<ol><li>One<li>Two<li>Three</ol>"; Document doc = Jsoup.parse(h); Elements sibs = doc.select("li + li"); assertEquals(2, sibs.size()); assertEquals("Two", sibs.get(0).text()); assertEquals("Three", sibs.get(1).text()); } @Test public void adjacentSiblingsWithId() { String h = "<ol><li id=1>One<li id=2>Two<li id=3>Three</ol>"; Document doc = Jsoup.parse(h); Elements sibs = doc.select("li#1 + li#2"); assertEquals(1, sibs.size()); assertEquals("Two", sibs.get(0).text()); } @Test public void notAdjacent() { String h = "<ol><li id=1>One<li id=2>Two<li id=3>Three</ol>"; Document doc = Jsoup.parse(h); Elements sibs = doc.select("li#1 + li#3"); assertEquals(0, sibs.size()); } @Test public void mixCombinator() { String h = "<div class=foo><ol><li>One<li>Two<li>Three</ol></div>"; 
Document doc = Jsoup.parse(h); Elements sibs = doc.select("body > div.foo li + li"); assertEquals(2, sibs.size()); assertEquals("Two", sibs.get(0).text()); assertEquals("Three", sibs.get(1).text()); } @Test public void mixCombinatorGroup() { String h = "<div class=foo><ol><li>One<li>Two<li>Three</ol></div>"; Document doc = Jsoup.parse(h); Elements els = doc.select(".foo > ol, ol > li + li"); assertEquals(3, els.size()); assertEquals("ol", els.get(0).tagName()); assertEquals("Two", els.get(1).text()); assertEquals("Three", els.get(2).text()); } @Test public void generalSiblings() { String h = "<ol><li id=1>One<li id=2>Two<li id=3>Three</ol>"; Document doc = Jsoup.parse(h); Elements els = doc.select("#1 ~ #3"); assertEquals(1, els.size()); assertEquals("Three", els.first().text()); } // for http://github.com/jhy/jsoup/issues#issue/10 @Test public void testCharactersInIdAndClass() { // using CSS spec for identifiers (id and class): a-z0-9, -, _. NOT . (which is OK in html spec, but not css) String h = "<div><p id='a1-foo_bar'>One</p><p class='b2-qux_bif'>Two</p></div>"; Document doc = Jsoup.parse(h); Element el1 = doc.getElementById("a1-foo_bar"); assertEquals("One", el1.text()); Element el2 = doc.getElementsByClass("b2-qux_bif").first(); assertEquals("Two", el2.text()); Element el3 = doc.select("#a1-foo_bar").first(); assertEquals("One", el3.text()); Element el4 = doc.select(".b2-qux_bif").first(); assertEquals("Two", el4.text()); } // for http://github.com/jhy/jsoup/issues#issue/13 @Test public void testSupportsLeadingCombinator() { String h = "<div><p><span>One</span><span>Two</span></p></div>"; Document doc = Jsoup.parse(h); Element p = doc.select("div > p").first(); Elements spans = p.select("> span"); assertEquals(2, spans.size()); assertEquals("One", spans.first().text()); // make sure doesn't get nested h = "<div id=1><div id=2><div id=3></div></div></div>"; doc = Jsoup.parse(h); Element div = doc.select("div").select(" > div").first(); assertEquals("2", div.id()); } @Test public void testPseudoLessThan() { Document doc = Jsoup.parse("<div><p>One</p><p>Two</p><p>Three</>p></div><div><p>Four</p>"); Elements ps = doc.select("div p:lt(2)"); assertEquals(3, ps.size()); assertEquals("One", ps.get(0).text()); assertEquals("Two", ps.get(1).text()); assertEquals("Four", ps.get(2).text()); } @Test public void testPseudoGreaterThan() { Document doc = Jsoup.parse("<div><p>One</p><p>Two</p><p>Three</p></div><div><p>Four</p>"); Elements ps = doc.select("div p:gt(0)"); assertEquals(2, ps.size()); assertEquals("Two", ps.get(0).text()); assertEquals("Three", ps.get(1).text()); } @Test public void testPseudoEquals() { Document doc = Jsoup.parse("<div><p>One</p><p>Two</p><p>Three</>p></div><div><p>Four</p>"); Elements ps = doc.select("div p:eq(0)"); assertEquals(2, ps.size()); assertEquals("One", ps.get(0).text()); assertEquals("Four", ps.get(1).text()); Elements ps2 = doc.select("div:eq(0) p:eq(0)"); assertEquals(1, ps2.size()); assertEquals("One", ps2.get(0).text()); assertEquals("p", ps2.get(0).tagName()); } @Test public void testPseudoBetween() { Document doc = Jsoup.parse("<div><p>One</p><p>Two</p><p>Three</>p></div><div><p>Four</p>"); Elements ps = doc.select("div p:gt(0):lt(2)"); assertEquals(1, ps.size()); assertEquals("Two", ps.get(0).text()); } @Test public void testPseudoCombined() { Document doc = Jsoup.parse("<div class='foo'><p>One</p><p>Two</p></div><div><p>Three</p><p>Four</p></div>"); Elements ps = doc.select("div.foo p:gt(0)"); assertEquals(1, ps.size()); assertEquals("Two", 
ps.get(0).text()); } @Test public void testPseudoHas() { Document doc = Jsoup.parse("<div id=0><p><span>Hello</span></p></div> <div id=1><span class=foo>There</span></div> <div id=2><p>Not</p></div>"); Elements divs1 = doc.select("div:has(span)"); assertEquals(2, divs1.size()); assertEquals("0", divs1.get(0).id()); assertEquals("1", divs1.get(1).id()); Elements divs2 = doc.select("div:has([class])"); assertEquals(1, divs2.size()); assertEquals("1", divs2.get(0).id()); Elements divs3 = doc.select("div:has(span, p)"); assertEquals(3, divs3.size()); assertEquals("0", divs3.get(0).id()); assertEquals("1", divs3.get(1).id()); assertEquals("2", divs3.get(2).id()); Elements els1 = doc.body().select(":has(p)"); assertEquals(3, els1.size()); // body, div, dib assertEquals("body", els1.first().tagName()); assertEquals("0", els1.get(1).id()); assertEquals("2", els1.get(2).id()); Elements els2 = doc.body().select(":has(> span)"); assertEquals(2,els2.size()); // p, div assertEquals("p",els2.first().tagName()); assertEquals("1", els2.get(1).id()); } @Test public void testNestedHas() { Document doc = Jsoup.parse("<div><p><span>One</span></p></div> <div><p>Two</p></div>"); Elements divs = doc.select("div:has(p:has(span))"); assertEquals(1, divs.size()); assertEquals("One", divs.first().text()); // test matches in has divs = doc.select("div:has(p:matches((?i)two))"); assertEquals(1, divs.size()); assertEquals("div", divs.first().tagName()); assertEquals("Two", divs.first().text()); // test contains in has divs = doc.select("div:has(p:contains(two))"); assertEquals(1, divs.size()); assertEquals("div", divs.first().tagName()); assertEquals("Two", divs.first().text()); } @Test @MultiLocaleTest public void testPseudoContains() { Document doc = Jsoup.parse("<div><p>The Rain.</p> <p class=light>The <i>RAIN</i>.</p> <p>Rain, the.</p></div>"); Elements ps1 = doc.select("p:contains(Rain)"); assertEquals(3, ps1.size()); Elements ps2 = doc.select("p:contains(the rain)"); assertEquals(2, ps2.size()); assertEquals("The Rain.", ps2.first().html()); assertEquals("The <i>RAIN</i>.", ps2.last().html()); Elements ps3 = doc.select("p:contains(the Rain):has(i)"); assertEquals(1, ps3.size()); assertEquals("light", ps3.first().className()); Elements ps4 = doc.select(".light:contains(rain)"); assertEquals(1, ps4.size()); assertEquals("light", ps3.first().className()); Elements ps5 = doc.select(":contains(rain)"); assertEquals(8, ps5.size()); // html, body, div,... 
Elements ps6 = doc.select(":contains(RAIN)"); assertEquals(8, ps6.size()); } @Test public void testPsuedoContainsWithParentheses() { Document doc = Jsoup.parse("<div><p id=1>This (is good)</p><p id=2>This is bad)</p>"); Elements ps1 = doc.select("p:contains(this (is good))"); assertEquals(1, ps1.size()); assertEquals("1", ps1.first().id()); Elements ps2 = doc.select("p:contains(this is bad\\))"); assertEquals(1, ps2.size()); assertEquals("2", ps2.first().id()); } @Test @MultiLocaleTest public void containsOwn() { Document doc = Jsoup.parse("<p id=1>Hello <b>there</b> igor</p>"); Elements ps = doc.select("p:containsOwn(Hello IGOR)"); assertEquals(1, ps.size()); assertEquals("1", ps.first().id()); assertEquals(0, doc.select("p:containsOwn(there)").size()); Document doc2 = Jsoup.parse("<p>Hello <b>there</b> IGOR</p>"); assertEquals(1, doc2.select("p:containsOwn(igor)").size()); } @Test public void testMatches() { Document doc = Jsoup.parse("<p id=1>The <i>Rain</i></p> <p id=2>There are 99 bottles.</p> <p id=3>Harder (this)</p> <p id=4>Rain</p>"); Elements p1 = doc.select("p:matches(The rain)"); // no match, case sensitive assertEquals(0, p1.size()); Elements p2 = doc.select("p:matches((?i)the rain)"); // case insense. should include root, html, body assertEquals(1, p2.size()); assertEquals("1", p2.first().id()); Elements p4 = doc.select("p:matches((?i)^rain$)"); // bounding assertEquals(1, p4.size()); assertEquals("4", p4.first().id()); Elements p5 = doc.select("p:matches(\\d+)"); assertEquals(1, p5.size()); assertEquals("2", p5.first().id()); Elements p6 = doc.select("p:matches(\\w+\\s+\\(\\w+\\))"); // test bracket matching assertEquals(1, p6.size()); assertEquals("3", p6.first().id()); Elements p7 = doc.select("p:matches((?i)the):has(i)"); // multi assertEquals(1, p7.size()); assertEquals("1", p7.first().id()); } @Test public void matchesOwn() { Document doc = Jsoup.parse("<p id=1>Hello <b>there</b> now</p>"); Elements p1 = doc.select("p:matchesOwn((?i)hello now)"); assertEquals(1, p1.size()); assertEquals("1", p1.first().id()); assertEquals(0, doc.select("p:matchesOwn(there)").size()); } @Test public void testRelaxedTags() { Document doc = Jsoup.parse("<abc_def id=1>Hello</abc_def> <abc-def id=2>There</abc-def>"); Elements el1 = doc.select("abc_def"); assertEquals(1, el1.size()); assertEquals("1", el1.first().id()); Elements el2 = doc.select("abc-def"); assertEquals(1, el2.size()); assertEquals("2", el2.first().id()); } @Test public void notParas() { Document doc = Jsoup.parse("<p id=1>One</p> <p>Two</p> <p><span>Three</span></p>"); Elements el1 = doc.select("p:not([id=1])"); assertEquals(2, el1.size()); assertEquals("Two", el1.first().text()); assertEquals("Three", el1.last().text()); Elements el2 = doc.select("p:not(:has(span))"); assertEquals(2, el2.size()); assertEquals("One", el2.first().text()); assertEquals("Two", el2.last().text()); } @Test public void notAll() { Document doc = Jsoup.parse("<p>Two</p> <p><span>Three</span></p>"); Elements el1 = doc.body().select(":not(p)"); // should just be the span assertEquals(2, el1.size()); assertEquals("body", el1.first().tagName()); assertEquals("span", el1.last().tagName()); } @Test public void notClass() { Document doc = Jsoup.parse("<div class=left>One</div><div class=right id=1><p>Two</p></div>"); Elements el1 = doc.select("div:not(.left)"); assertEquals(1, el1.size()); assertEquals("1", el1.first().id()); } @Test public void handlesCommasInSelector() { Document doc = Jsoup.parse("<p 
name='1,2'>One</p><div>Two</div><ol><li>123</li><li>Text</li></ol>"); Elements ps = doc.select("[name=1,2]"); assertEquals(1, ps.size()); Elements containers = doc.select("div, li:matches([0-9,]+)"); assertEquals(2, containers.size()); assertEquals("div", containers.get(0).tagName()); assertEquals("li", containers.get(1).tagName()); assertEquals("123", containers.get(1).text()); } @Test public void selectSupplementaryCharacter() { String s = new String(Character.toChars(135361)); Document doc = Jsoup.parse("<div k" + s + "='" + s + "'>^" + s +"$/div>"); assertEquals("div", doc.select("div[k" + s + "]").first().tagName()); assertEquals("div", doc.select("div:containsOwn(" + s + ")").first().tagName()); } @Test public void selectClassWithSpace() { final String html = "<div class=\"value\">class without space</div>\n" + "<div class=\"value \">class with space</div>"; Document doc = Jsoup.parse(html); Elements found = doc.select("div[class=value ]"); assertEquals(2, found.size()); assertEquals("class without space", found.get(0).text()); assertEquals("class with space", found.get(1).text()); found = doc.select("div[class=\"value \"]"); assertEquals(2, found.size()); assertEquals("class without space", found.get(0).text()); assertEquals("class with space", found.get(1).text()); found = doc.select("div[class=\"value\\ \"]"); assertEquals(0, found.size()); } @Test public void selectSameElements() { final String html = "<div>one</div><div>one</div>"; Document doc = Jsoup.parse(html); Elements els = doc.select("div"); assertEquals(2, els.size()); Elements subSelect = els.select(":contains(one)"); assertEquals(2, subSelect.size()); } @Test public void attributeWithBrackets() { String html = "<div data='End]'>One</div> <div data='[Another)]]'>Two</div>"; Document doc = Jsoup.parse(html); assertEquals("One", doc.select("div[data='End]']").first().text()); assertEquals("Two", doc.select("div[data='[Another)]]']").first().text()); assertEquals("One", doc.select("div[data=\"End]\"]").first().text()); assertEquals("Two", doc.select("div[data=\"[Another)]]\"]").first().text()); } @Test @MultiLocaleTest public void containsData() { String html = "<p>function</p><script>FUNCTION</script><style>item</style><span><!-- comments --></span>"; Document doc = Jsoup.parse(html); Element body = doc.body(); Elements dataEls1 = body.select(":containsData(function)"); Elements dataEls2 = body.select("script:containsData(function)"); Elements dataEls3 = body.select("span:containsData(comments)"); Elements dataEls4 = body.select(":containsData(o)"); Elements dataEls5 = body.select("style:containsData(ITEM)"); assertEquals(2, dataEls1.size()); // body and script assertEquals(1, dataEls2.size()); assertEquals(dataEls1.last(), dataEls2.first()); assertEquals("<script>FUNCTION</script>", dataEls2.outerHtml()); assertEquals(1, dataEls3.size()); assertEquals("span", dataEls3.first().tagName()); assertEquals(3, dataEls4.size()); assertEquals("body", dataEls4.first().tagName()); assertEquals("script", dataEls4.get(1).tagName()); assertEquals("span", dataEls4.get(2).tagName()); assertEquals(1, dataEls5.size()); } @Test public void containsWithQuote() { String html = "<p>One'One</p><p>One'Two</p>"; Document doc = Jsoup.parse(html); Elements els = doc.select("p:contains(One\\'One)"); assertEquals(1, els.size()); assertEquals("One'One", els.text()); } @Test public void selectFirst() { String html = "<p>One<p>Two<p>Three"; Document doc = Jsoup.parse(html); assertEquals("One", doc.selectFirst("p").text()); } @Test public void 
selectFirstWithAnd() { String html = "<p>One<p class=foo>Two<p>Three"; Document doc = Jsoup.parse(html); assertEquals("Two", doc.selectFirst("p.foo").text()); } @Test public void selectFirstWithOr() { String html = "<p>One<p>Two<p>Three<div>Four"; Document doc = Jsoup.parse(html); assertEquals("One", doc.selectFirst("p, div").text()); } @Test public void matchText() { String html = "<p>One<br>Two</p>"; Document doc = Jsoup.parse(html); String origHtml = doc.html(); Elements one = doc.select("p:matchText:first-child"); assertEquals("One", one.first().text()); Elements two = doc.select("p:matchText:last-child"); assertEquals("Two", two.first().text()); assertEquals(origHtml, doc.html()); assertEquals("Two", doc.select("p:matchText + br + *").text()); } @Test public void splitOnBr() { String html = "<div><p>One<br>Two<br>Three</p></div>"; Document doc = Jsoup.parse(html); Elements els = doc.select("p:matchText"); assertEquals(3, els.size()); assertEquals("One", els.get(0).text()); assertEquals("Two", els.get(1).text()); assertEquals("Three", els.get(2).toString()); } @Test public void matchTextAttributes() { Document doc = Jsoup.parse("<div><p class=one>One<br>Two<p class=two>Three<br>Four"); Elements els = doc.select("p.two:matchText:last-child"); assertEquals(1, els.size()); assertEquals("Four", els.text()); } @Test public void findBetweenSpan() { Document doc = Jsoup.parse("<p><span>One</span> Two <span>Three</span>"); Elements els = doc.select("span ~ p:matchText"); // the Two becomes its own p, sibling of the span assertEquals(1, els.size()); assertEquals("Two", els.text()); } @Test public void startsWithBeginsWithSpace() { Document doc = Jsoup.parse("<small><a href=\" mailto:abc@def.net\">(abc@def.net)</a></small>"); Elements els = doc.select("a[href^=' mailto']"); assertEquals(1, els.size()); } @Test public void endsWithEndsWithSpaces() { Document doc = Jsoup.parse("<small><a href=\" mailto:abc@def.net \">(abc@def.net)</a></small>"); Elements els = doc.select("a[href$='.net ']"); assertEquals(1, els.size()); } private final String mixedCase = "<html xmlns:n=\"urn:ns\"><n:mixedCase>text</n:mixedCase></html>"; private final String lowercase = "<html xmlns:n=\"urn:ns\"><n:lowercase>text</n:lowercase></html>"; @Test public void html_mixed_case_simple_name() { Document doc = Jsoup.parse(mixedCase, "", Parser.htmlParser()); assertEquals(0, doc.select("mixedCase").size()); } @Test public void html_mixed_case_wildcard_name() { Document doc = Jsoup.parse(mixedCase, "", Parser.htmlParser()); assertEquals(1, doc.select("*|mixedCase").size()); } @Test public void html_lowercase_simple_name() { Document doc = Jsoup.parse(lowercase, "", Parser.htmlParser()); assertEquals(0, doc.select("lowercase").size()); } @Test public void html_lowercase_wildcard_name() { Document doc = Jsoup.parse(lowercase, "", Parser.htmlParser()); assertEquals(1, doc.select("*|lowercase").size()); } @Test public void xml_mixed_case_simple_name() { Document doc = Jsoup.parse(mixedCase, "", Parser.xmlParser()); assertEquals(0, doc.select("mixedCase").size()); } @Test public void xml_mixed_case_wildcard_name() { Document doc = Jsoup.parse(mixedCase, "", Parser.xmlParser()); assertEquals(1, doc.select("*|mixedCase").size()); } @Test public void xml_lowercase_simple_name() { Document doc = Jsoup.parse(lowercase, "", Parser.xmlParser()); assertEquals(0, doc.select("lowercase").size()); } @Test public void xml_lowercase_wildcard_name() { Document doc = Jsoup.parse(lowercase, "", Parser.xmlParser()); assertEquals(1, 
doc.select("*|lowercase").size()); } @Test public void trimSelector() { Document doc = Jsoup.parse("<p><span>Hello"); Elements els = doc.select(" p span "); assertEquals(1, els.size()); assertEquals("Hello", els.first().text()); } @Test public void xmlWildcardNamespaceTest() { Document doc = Jsoup.parse("<ns1:MyXmlTag>1111</ns1:MyXmlTag><ns2:MyXmlTag>2222</ns2:MyXmlTag>", "", Parser.xmlParser()); Elements select = doc.select("*|MyXmlTag"); assertEquals(2, select.size()); assertEquals("1111", select.get(0).text()); assertEquals("2222", select.get(1).text()); } @Test public void childElements() { String html = "<body><span id=1>One <span id=2>Two</span></span></body>"; Document doc = Jsoup.parse(html); Element outer = doc.selectFirst("span"); Element span = outer.selectFirst("span"); Element inner = outer.selectFirst("* span"); assertEquals("1", outer.id()); assertEquals("1", span.id()); assertEquals("2", inner.id()); assertEquals(outer, span); assertNotEquals(outer, inner); } }
package test.assertion;

import org.testng.Assert;
import org.testng.annotations.Test;
import org.testng.asserts.IAssert;
import org.testng.asserts.SoftAssert;

import java.util.ArrayList;
import java.util.Collection;

public class SoftAssertTest {

    @Test
    public void testOnSucceedAndFailureCalled() throws Exception {
        final Collection<IAssert> succeed = new ArrayList<>();
        final Collection<IAssert> failures = new ArrayList<>();
        final SoftAssert sa = new SoftAssert() {
            @Override
            public void onAssertSuccess(IAssert assertCommand) {
                succeed.add(assertCommand);
            }

            @Override
            public void onAssertFailure(IAssert assertCommand, AssertionError ex) {
                failures.add(assertCommand);
            }
        };
        sa.assertTrue(true);
        sa.assertTrue(false);
        Assert.assertEquals(succeed.size(), 1, succeed.toString());
        Assert.assertEquals(failures.size(), 1, failures.toString());
    }

    @Test
    public void testAssertAllCount() throws Exception {
        String message = "My message";
        SoftAssert sa = new SoftAssert();
        sa.assertTrue(true);
        sa.assertTrue(false, message);
        try {
            sa.assertAll();
            Assert.fail("Exception expected");
        } catch (AssertionError e) {
            String[] lines = e.getMessage().split("\r?\n");
            Assert.assertEquals(lines.length, 2);
            lines[1] = lines[1].replaceFirst(message, "");
            Assert.assertFalse(lines[1].contains(message));
        }
    }
}
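/*
 * A short usage sketch (separate from SoftAssertTest above) of the way SoftAssert is
 * normally driven: each assert* call records a failure instead of throwing, and
 * assertAll() raises a single AssertionError summarising everything recorded. The
 * class name and the values being checked are invented for illustration.
 */
import org.testng.annotations.Test;
import org.testng.asserts.SoftAssert;

public class SoftAssertUsageSketch {

    @Test
    public void collectsAllFailuresBeforeReporting() {
        SoftAssert softly = new SoftAssert();

        // Neither call aborts the test immediately, even when it fails.
        softly.assertTrue("report".startsWith("r"), "prefix check");
        softly.assertEquals("report".length(), 6, "length check");

        // Throws one AssertionError listing every recorded failure (none in this example).
        softly.assertAll();
    }
}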
package wepa.wepa.selenium;

import org.fluentlenium.adapter.FluentTest;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
import org.junit.runner.RunWith;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.htmlunit.HtmlUnitDriver;
import org.springframework.boot.context.embedded.LocalServerPort;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;

@RunWith(SpringRunner.class)
@SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT)
public class PersonTest extends FluentTest {

    public WebDriver webDriver = new HtmlUnitDriver();

    public WebDriver getDefaultDriver() {
        return webDriver;
    }

    @LocalServerPort
    private Integer port;

    @Test
    public void addingExercisesNewStudent() {
        // fill(find("#username")).with("nakki");
        // fill(find("#password")).with("nakki");
        // submit(find("form").first());
        // assertFalse(pageSource().contains("555555555"));
        // assertFalse(pageSource().contains("Maija"));
        // fill(find("#studentNumber")).with("555555555");
        // fill(find("#name")).with("Maija");
        // fill(find("#exercises")).with("4");
        // submit(find("form").first());
        // assertTrue(pageSource().contains("555555555"));
        // assertTrue(pageSource().contains("Maija"));
    }
}
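/*
 * A hedged sketch (kept separate from PersonTest above) of how its commented-out flow
 * might look once re-enabled. It reuses the FluentLenium calls already present in the
 * comments (find, fill, submit, pageSource); the goTo(...) navigation, the root path,
 * and the class name are assumptions, since the original test never shows which page
 * it starts from.
 */
import org.fluentlenium.adapter.FluentTest;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.htmlunit.HtmlUnitDriver;
import org.springframework.boot.context.embedded.LocalServerPort;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;

import static org.junit.Assert.assertTrue;

@RunWith(SpringRunner.class)
@SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT)
public class PersonFlowSketchTest extends FluentTest {

    public WebDriver webDriver = new HtmlUnitDriver();

    public WebDriver getDefaultDriver() {
        return webDriver;
    }

    @LocalServerPort
    private Integer port;

    @Test
    public void addingExercisesShowsTheStudentOnThePage() {
        goTo("http://localhost:" + port + "/"); // assumed landing page with the login form

        fill(find("#username")).with("nakki");
        fill(find("#password")).with("nakki");
        submit(find("form").first());

        fill(find("#studentNumber")).with("555555555");
        fill(find("#name")).with("Maija");
        fill(find("#exercises")).with("4");
        submit(find("form").first());

        assertTrue(pageSource().contains("Maija"));
    }
}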
// jTDS JDBC Driver for Microsoft SQL Server // This library is free software; you can redistribute it and/or // modify it under the terms of the GNU Lesser General Public // This library is distributed in the hope that it will be useful, // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU // You should have received a copy of the GNU Lesser General Public // Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA package net.sourceforge.jtds.test; import java.sql.*; import java.math.BigDecimal; import java.text.SimpleDateFormat; import junit.framework.TestSuite; import net.sourceforge.jtds.util.Logger; // MJH - Changes for use with new version of jTDS // Changed testCursorResultSetConcurrency0003 so that cursor result sets // only allows one open result set per statement. // Changed insertBigDecimal and testLongToVarchar to use execute rather than // executeUpdate as the code wants to obtain a result set later on. // New version of jTDS swallows everything on // executeUpdate to allow it to return the last update count by default. // Changed testCursorLargeCharInsert0017 to check for DataTruncation exception // rather than specific error code. /** * @author Alin Sinpalean * @version 1.0 * @since 0.4 */ public class SAfeTest extends DatabaseTestCase { public SAfeTest(String name) { super(name); } public static void main(String args[]) { Logger.setActive(true); if (args.length > 0) { junit.framework.TestSuite s = new TestSuite(); for (int i=0; i<args.length; i++) { s.addTest(new SAfeTest(args[i])); } junit.textui.TestRunner.run(s); } else { junit.textui.TestRunner.run(SAfeTest.class); } } /** * Test whether NULL values, 0-length strings and single space strings * are treated right. */ public void testNullLengthStrings0001() throws Exception { String types[] = { "VARCHAR(50)", "TEXT", "VARCHAR(350)", "NVARCHAR(50)", "NTEXT", }; String values[] = { null, "", " ", "x" }; Statement stmt = con.createStatement(); boolean tds70orLater = props.getProperty("TDS")==null || props.getProperty("TDS").charAt(0)>='7'; int typeCnt = tds70orLater ? types.length : 2; for (int i=0; i<typeCnt; i++) { assertTrue(stmt.executeUpdate("CREATE TABLE #SAfe0001 (val "+types[i]+" NULL)")==0); for (int j=0; j<values.length; j++) { String insQuery = values[j]==null ? "INSERT INTO #SAfe0001 VALUES (NULL)" : "INSERT INTO #SAfe0001 VALUES ('"+values[j]+"')"; assertTrue(stmt.executeUpdate(insQuery)==1); ResultSet rs = stmt.executeQuery("SELECT val FROM #SAfe0001"); assertTrue(rs.next()); if (tds70orLater || !" ".equals(values[j])) assertEquals(values[j], rs.getString(1)); else assertEquals("", rs.getObject(1)); assertTrue(!rs.next()); assertTrue(stmt.executeUpdate("TRUNCATE TABLE #SAfe0001")==0); } assertTrue(stmt.executeUpdate("DROP TABLE #SAfe0001")==0); } } /** * Test cancelling. Create 2 connections, lock some records on one of them * and try to read them using the other one. Then, try executing some other * queries on the second connection to make sure it's in a correct state. 
*/ public void testCancel0002() throws Exception { // Create another connection to make sure the 2 statements use the same // physical connection Connection con2 = getConnection(); Statement stmt = con.createStatement(); assertTrue(!stmt.execute( "create table ##SAfe0002 (id int primary key, val varchar(20) null) "+ "insert into ##SAfe0002 values (1, 'Line 1') "+ "insert into ##SAfe0002 values (2, 'Line 2')")); assertEquals(0, stmt.getUpdateCount()); assertTrue(!stmt.getMoreResults()); assertEquals(1, stmt.getUpdateCount()); assertTrue(!stmt.getMoreResults()); assertEquals(1, stmt.getUpdateCount()); assertTrue(!stmt.getMoreResults()); assertEquals(-1, stmt.getUpdateCount()); con.setAutoCommit(false); // This is where we lock the first line in the table stmt.executeUpdate("update ##SAfe0002 set val='Updated Line' where id=1"); Statement stmt2 = con2.createStatement(); stmt2.setQueryTimeout(1); try { stmt2.executeQuery("select * from ##SAfe0002"); fail(); } catch (SQLException ex) { // SAfe We won't do an ex.getMessage().equals(...) test here // because the message could change and the test would fail. // We'll just assume we got here because of the timeout. ;o) } // SAfe What should we do with the results if the execution timed out?! con.commit(); con.setAutoCommit(true); stmt.execute("drop table ##SAfe0002"); stmt.close(); // Just run a tiny query to make sure the stream is still in working // condition. ResultSet rs = stmt2.executeQuery("select 1"); assertTrue(rs.next()); assertEquals(1, rs.getInt(1)); assertTrue(!rs.next()); con2.close(); } // MT-unsafe!!! volatile int started, done; volatile boolean failed; /** * Test <code>CursorResultSet</code> concurrency. Create a number of threads that execute concurrent queries using * scrollable result sets. All requests should be run on the same connection (<code>Tds</code> instance). 
*/ public void testCursorResultSetConcurrency0003() throws Exception { Statement stmt0 = con.createStatement(); stmt0.execute("create table #SAfe0003(id int primary key, val varchar(20) null) "+ "insert into #SAfe0003 values (1, 'Line 1') "+ "insert into #SAfe0003 values (2, 'Line 2')"); while (stmt0.getMoreResults() || stmt0.getUpdateCount()!=-1); final Object o1=new Object(), o2=new Object(); int threadCount = 25; Thread threads[] = new Thread[threadCount]; started = done = 0; failed = false; for (int i=0; i<threadCount; i++) { threads[i] = new Thread() { public void run() { ResultSet rs = null; Statement stmt = null; try { stmt = con.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY); rs = stmt.executeQuery("SELECT * FROM #SAfe0003"); assertEquals(null, rs.getWarnings()); assertEquals(null, stmt.getWarnings()); // Synchronize all threads synchronized(o2) { synchronized(o1) { started++; o1.notify(); } try { o2.wait(); } catch (InterruptedException e) { } } assertNotNull("executeQuery should not return null", rs); assertTrue(rs.next()); assertTrue(rs.next()); assertTrue(!rs.next()); assertTrue(rs.previous()); assertTrue(rs.previous()); assertTrue(!rs.previous()); } catch (SQLException e) { e.printStackTrace(); synchronized (o1) { failed = true; } fail("An SQL Exception occured: "+e); } finally { if (rs != null) if (stmt != null) try { stmt.close(); } catch (SQLException e) { } // Notify that we're done synchronized(o1) { done++; o1.notify(); } } } }; threads[i].start(); } while (true) { synchronized(o1) { if (started == threadCount) break; o1.wait(); } } synchronized(o2) { o2.notifyAll(); } boolean passed = true; for (int i=0; i<threadCount; i++) { stmt0 = con.createStatement(); ResultSet rs = stmt0.executeQuery("SELECT 1234"); passed &= rs.next(); passed &= !rs.next(); stmt0.close(); } while (true) { synchronized(o1) { if (done == threadCount) break; o1.wait(); } } for (int i=0; i<threadCount; i++) threads[i].join(); stmt0.close(); assertTrue(passed); assertTrue(!failed); } /** * Check that meta data information is fetched even for empty cursor-based result sets (bug #613199). * * @throws Exception */ public void testCursorResultSetEmpty0004() throws Exception { Statement stmt = con.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY); ResultSet rs = stmt.executeQuery("SELECT 5 Value WHERE 1=0"); assertEquals(null, stmt.getWarnings()); assertEquals(null, rs.getWarnings()); assertEquals("Value", rs.getMetaData().getColumnName(1)); assertTrue(!rs.isBeforeFirst()); assertTrue(!rs.isAfterLast()); assertTrue(!rs.isFirst()); assertTrue(!rs.isLast()); rs.next(); assertTrue(!rs.isBeforeFirst()); assertTrue(!rs.isAfterLast()); assertTrue(!rs.isFirst()); assertTrue(!rs.isLast()); rs.close(); stmt.close(); } /** * Check that values returned from bit fields are correct (not just 0) (bug #841670). 
* * @throws Exception */ public void testBitFields0005() throws Exception { Statement stmt = con.createStatement(); stmt.execute("create table #SAfe0005(id int primary key, bit1 bit not null, bit2 bit null) "+ "insert into #SAfe0005 values (0, 0, 0) "+ "insert into #SAfe0005 values (1, 1, 1) "+ "insert into #SAfe0005 values (2, 0, NULL)"); while (stmt.getMoreResults() || stmt.getUpdateCount()!=-1); ResultSet rs = stmt.executeQuery("SELECT * FROM #SAfe0005"); while (rs.next()) { int id = rs.getInt(1); int bit1 = rs.getInt(2); int bit2 = rs.getInt(3); assertTrue("id: "+id+"; bit1: "+bit1+"; bit2: "+bit2, bit1==id%2 && (bit2==id || id==2 && rs.wasNull())); } rs.close(); stmt.close(); } /** * Test that <code>CallableStatement</code>s with return values work correctly. * * @throws Exception */ public void testCallableStatement0006() throws Exception { final int myVal = 13; Statement stmt = con.createStatement(); stmt.execute("CREATE PROCEDURE #SAfe0006 @p1 INT, @p2 VARCHAR(20) OUT AS " + "SELECT @p2=CAST(@p1-1 AS VARCHAR(20)) " + "SELECT @p1 AS value " + "RETURN @p1+1"); stmt.close(); // Try all formats: escaped, w/ exec and w/o exec String[] sql = {"{?=call #SAfe0006(?,?)}", "exec ?=#SAfe0006 ?,?", "?=#SAfe0006 ?,?"}; for (int i=0; i<sql.length; i++) { // Execute it using executeQuery CallableStatement cs = con.prepareCall(sql[i]); cs.registerOutParameter(1, Types.INTEGER); cs.setInt(2, myVal); cs.registerOutParameter(3, Types.VARCHAR); cs.executeQuery().close(); assertFalse(cs.getMoreResults()); assertEquals(-1, cs.getUpdateCount()); assertEquals(myVal+1, cs.getInt(1)); assertEquals(String.valueOf(myVal-1), cs.getString(3)); cs.close(); // Now use execute cs = con.prepareCall(sql[i]); cs.registerOutParameter(1, Types.INTEGER); cs.setInt(2, myVal); cs.registerOutParameter(3, Types.VARCHAR); assertTrue(cs.execute()); cs.getResultSet().close(); assertFalse(cs.getMoreResults()); assertEquals(-1, cs.getUpdateCount()); assertEquals(myVal+1, cs.getInt(1)); assertEquals(String.valueOf(myVal-1), cs.getString(3)); cs.close(); } } /** * Helper method for <code>testBigDecimal0007</code>. Inserts a BigDecimal * value obtained from a double value. * * @param stmt <code>PreparedStatement</code> instance * @param val the <code>double</code> value to insert * @param scaleFlag if <code>true</code> scale the value to 4, otherwise * leave it as it is */ private static void insertBigDecimal(PreparedStatement stmt, double val, boolean scaleFlag) throws Exception { BigDecimal bd = new BigDecimal(val); if (scaleFlag) { bd = bd.setScale(4, BigDecimal.ROUND_HALF_EVEN); } stmt.setBigDecimal(1, bd); stmt.execute(); int rowCount = stmt.getUpdateCount(); assertEquals(1, rowCount); assertTrue(stmt.getMoreResults()); ResultSet rs = stmt.getResultSet(); assertTrue(rs.next()); assertEquals("Values don't match.", val, rs.getDouble(1), 0); } /** * Test <code>BigDecimal</code>s created from double values (i.e with very * large scales). */ public void testBigDecimal0007() throws Exception { Statement createStmt = con.createStatement(); createStmt.execute("CREATE TABLE #SAfe0007(value MONEY)"); createStmt.close(); PreparedStatement stmt = con.prepareStatement( "INSERT INTO #SAfe0007(value) VALUES (?) " + "SELECT * FROM #SAfe0007 DELETE #SAfe0007"); // Now test with certain values. 
insertBigDecimal(stmt, 1.1, false); insertBigDecimal(stmt, 0.1, false); insertBigDecimal(stmt, 0.1, true); insertBigDecimal(stmt, 0.01, false); insertBigDecimal(stmt, 0.01, true); insertBigDecimal(stmt, 0.02, false); insertBigDecimal(stmt, 0.02, true); insertBigDecimal(stmt, 0.25, false); stmt.close(); } /** * Test writing <code>long</code> values to VARCHAR fields. There was a * regression introduced in release 0.6 that caused <code>long</code> * fields to be sent with non-zero scale and appear with decimals when * written into VARCHAR fields. */ public void testLongToVarchar0008() throws Exception { long myVal = 13; Statement createStmt = con.createStatement(); createStmt.execute("CREATE TABLE #SAfe0008(value VARCHAR(255))"); createStmt.close(); PreparedStatement stmt = con.prepareStatement( "INSERT INTO #SAfe0008(value) values (?) " + "SELECT * FROM #SAfe0008 DELETE #SAfe0008"); stmt.setLong(1, myVal); stmt.execute(); int rowCount = stmt.getUpdateCount(); assertEquals(1, rowCount); assertTrue(stmt.getMoreResults()); ResultSet rs = stmt.getResultSet(); assertTrue(rs.next()); assertEquals("Values don't match.", String.valueOf(myVal), rs.getString(1)); stmt.close(); } /** * Test <code>ResultSet.deleteRow()</code> on updateable result sets. */ public void testDeleteRow0009() throws Exception { Statement stmt = con.createStatement(); stmt.execute("CREATE TABLE #SAfe0009(value VARCHAR(255) PRIMARY KEY)"); stmt.close(); PreparedStatement insStmt = con.prepareStatement( "INSERT INTO #SAfe0009(value) values (?)"); insStmt.setString(1, "Row 1"); assertEquals(1, insStmt.executeUpdate()); insStmt.setString(1, "Row 2"); assertEquals(1, insStmt.executeUpdate()); insStmt.close(); stmt = con.createStatement(ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_UPDATABLE); ResultSet rs = stmt.executeQuery("SELECT * FROM #SAfe0009 ORDER BY 1"); assertEquals(null, stmt.getWarnings()); assertEquals(null, rs.getWarnings()); assertTrue(rs.last()); assertTrue(!rs.rowDeleted()); rs.deleteRow(); assertTrue(rs.rowDeleted()); rs.close(); rs = stmt.executeQuery("SELECT * FROM #SAfe0009"); assertTrue(rs.next()); assertEquals("Row 1", rs.getString(1)); assertTrue(!rs.next()); rs.close(); stmt.close(); } /** * Test VARCHAR output parameters returned by CallableStatements. * <p> * An issue existed, caused by the fact that the parameter was sent to SQL * Server as a short VARCHAR (not XORed with 0x80) limiting its length to * 255 characters. See bug [815348] for more details. 
*/ public void testCallableStatementVarchar0010() throws Exception { Statement stmt = con.createStatement(); stmt.execute("CREATE PROCEDURE #SAfe0010 @p1 VARCHAR(2049) OUT AS " + "SELECT @p1 = @p1 + @p1 " + "SELECT @p1 = @p1 + @p1 " + "SELECT @p1 = @p1 + @p1 " + "SELECT @p1 AS value " + "RETURN LEN(@p1)"); stmt.close(); // 256 characters long string String myVal = "01234567890123456789012345678901234567890123456789" + "01234567890123456789012345678901234567890123456789" + "01234567890123456789012345678901234567890123456789" + "01234567890123456789012345678901234567890123456789" + "01234567890123456789012345678901234567890123456789" + "012345"; // Execute it using executeQuery CallableStatement cs = con.prepareCall("{?=call #SAfe0010(?)}"); cs.registerOutParameter(1, Types.INTEGER); cs.setString(2, myVal); cs.registerOutParameter(2, Types.VARCHAR); ResultSet rs = cs.executeQuery(); assertTrue(rs.next()); String rsVal = rs.getString(1); rs.close(); assertFalse(cs.getMoreResults()); assertEquals(-1, cs.getUpdateCount()); assertEquals(myVal.length() * 8, cs.getInt(1)); assertEquals(rsVal, cs.getString(2)); cs.close(); } /** * Test <code>ResultSet.updateRow()</code> on updateable result sets. */ public void testUpdateRow0011() throws Exception { Statement stmt = con.createStatement(); stmt.execute("CREATE TABLE #SAfe0011(value VARCHAR(255) PRIMARY KEY)"); stmt.close(); PreparedStatement insStmt = con.prepareStatement( "INSERT INTO #SAfe0011(value) values (?)"); insStmt.setString(1, "Row 1"); assertEquals(1, insStmt.executeUpdate()); insStmt.setString(1, "Row 2"); assertEquals(1, insStmt.executeUpdate()); insStmt.close(); stmt = con.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_UPDATABLE); ResultSet rs = stmt.executeQuery("SELECT * FROM #SAfe0011 ORDER BY 1"); assertEquals(null, stmt.getWarnings()); assertEquals(null, rs.getWarnings()); assertTrue(rs.next()); assertTrue(rs.next()); rs.updateString(1, "Row X"); rs.updateRow(); rs.next(); assertEquals("Row X", rs.getString(1)); rs.close(); } /** * Test <code>ResultSet.insertRow()</code> on updateable result sets. */ public void testInsertRow0012() throws Exception { Statement stmt = con.createStatement(); stmt.execute("CREATE TABLE #SAfe0012(value VARCHAR(255) PRIMARY KEY)"); stmt.close(); PreparedStatement insStmt = con.prepareStatement( "INSERT INTO #SAfe0012(value) values (?)"); insStmt.setString(1, "Row 1"); assertEquals(1, insStmt.executeUpdate()); insStmt.setString(1, "Row 2"); assertEquals(1, insStmt.executeUpdate()); insStmt.close(); stmt = con.createStatement(ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_UPDATABLE); ResultSet rs = stmt.executeQuery("SELECT * FROM #SAfe0012 ORDER BY 1"); assertEquals(null, stmt.getWarnings()); assertEquals(null, rs.getWarnings()); // Insert the new row rs.moveToInsertRow(); rs.updateString(1, "Row X"); rs.insertRow(); // Check the ResultSet contents rs.moveToCurrentRow(); rs.next(); assertEquals("Row 1", rs.getString(1)); rs.next(); assertEquals("Row 2", rs.getString(1)); rs.next(); assertEquals("Row X", rs.getString(1)); rs.close(); } /** * Test how an "out-of-order" close behaves (e.g close the * <code>Connection</code> first, then the <code>Statement</code> anf * finally the <code>ResultSet</code>). 
*/ public void testOutOfOrderClose0013() throws Exception { Connection localConn = getConnection(); Statement stmt = localConn.createStatement(); stmt.execute("CREATE TABLE #SAfe0013(value VARCHAR(255) PRIMARY KEY)"); PreparedStatement insStmt = localConn.prepareStatement( "INSERT INTO #SAfe0013(value) values (?)"); insStmt.setString(1, "Row 1"); assertEquals(1, insStmt.executeUpdate()); insStmt.setString(1, "Row 2"); assertEquals(1, insStmt.executeUpdate()); ResultSet rs = stmt.executeQuery("SELECT * FROM #SAfe0013"); // Close the connection first localConn.close(); // Now, close the statements stmt.close(); insStmt.close(); // And finally, close the ResultSet rs.close(); } /** * Test cursor-based <code>ResultSet</code>s obtained from * <code>PreparedStatement</code>s and <code>CallableStatement</code>s. */ public void testPreparedAndCallableCursors0014() throws Exception { Statement stmt = con.createStatement(); stmt.executeUpdate("CREATE TABLE #SAfe0014(id INT PRIMARY KEY)"); stmt.executeUpdate("INSERT INTO #SAfe0014 VALUES (1)"); stmt.executeUpdate("CREATE PROCEDURE #sp_SAfe0014(@P1 INT, @P2 INT) AS " + "SELECT id, @P2 FROM #SAfe0014 WHERE id=@P1"); stmt.close(); PreparedStatement ps = con.prepareStatement("SELECT id FROM #SAfe0014", ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_UPDATABLE); ResultSet resultSet = ps.executeQuery(); // No warnings assertEquals(null, resultSet.getWarnings()); assertEquals(null, ps.getWarnings()); // Correct ResultSet assertTrue(resultSet.next()); assertEquals(1, resultSet.getInt(1)); assertTrue(!resultSet.next()); // Correct meta data ResultSetMetaData rsmd = resultSet.getMetaData(); assertEquals("id", rsmd.getColumnName(1)); assertEquals("#SAfe0014", rsmd.getTableName(1)); // Insert row resultSet.moveToInsertRow(); resultSet.updateInt(1, 2); resultSet.insertRow(); resultSet.moveToCurrentRow(); // Check correct row count resultSet.last(); assertEquals(2, resultSet.getRow()); resultSet.close(); ps.close(); ps = con.prepareStatement("SELECT id, ? FROM #SAfe0014 WHERE id = ?", ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_UPDATABLE); ps.setInt(1, 5); ps.setInt(2, 1); resultSet = ps.executeQuery(); // No warnings assertEquals(null, resultSet.getWarnings()); assertEquals(null, ps.getWarnings()); // Correct ResultSet assertTrue(resultSet.next()); assertEquals(1, resultSet.getInt(1)); assertEquals(5, resultSet.getInt(2)); assertTrue(!resultSet.next()); // Correct meta data rsmd = resultSet.getMetaData(); assertEquals("id", rsmd.getColumnName(1)); assertEquals("#SAfe0014", rsmd.getTableName(1)); resultSet.close(); ps.close(); CallableStatement cs = con.prepareCall("{call #sp_SAfe0014(?,?)}", ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_UPDATABLE); cs.setInt(1, 1); cs.setInt(2, 3); resultSet = cs.executeQuery(); // No warnings assertEquals(null, resultSet.getWarnings()); assertEquals(null, cs.getWarnings()); // Correct ResultSet assertTrue(resultSet.next()); assertEquals(1, resultSet.getInt(1)); assertEquals(3, resultSet.getInt(2)); assertTrue(!resultSet.next()); // Correct meta data rsmd = resultSet.getMetaData(); assertEquals("id", rsmd.getColumnName(1)); assertEquals("#SAfe0014", rsmd.getTableName(1)); resultSet.close(); cs.close(); } /** * Test batch updates for both plain and prepared statements. 
*/ public void testBatchUpdates0015() throws Exception { Statement stmt = con.createStatement(); stmt.execute("CREATE TABLE #SAfe0015(value VARCHAR(255) PRIMARY KEY)"); // Execute prepared batch PreparedStatement insStmt = con.prepareStatement( "INSERT INTO #SAfe0015(value) values (?)"); insStmt.setString(1, "Row 1"); insStmt.addBatch(); insStmt.setString(1, "Row 2"); insStmt.addBatch(); int[] res = insStmt.executeBatch(); assertEquals(2, res.length); assertEquals(1, res[0]); assertEquals(1, res[1]); // Execute an empty batch res = insStmt.executeBatch(); assertEquals(0, res.length); insStmt.close(); // Execute plain batch stmt.addBatch("UPDATE #SAfe0015 SET value='R1' WHERE value='Row 1'"); stmt.addBatch("UPDATE #SAfe0015 SET value='R2' WHERE value='Row 2'"); res = stmt.executeBatch(); assertEquals(2, res.length); assertEquals(1, res[0]); assertEquals(1, res[1]); // Execute an empty batch res = stmt.executeBatch(); assertEquals(0, res.length); stmt.close(); } /** * Test that dates prior to 06/15/1940 0:00:00 are stored and retrieved * correctly. */ public void testOldDates0016() throws Exception { Statement stmt = con.createStatement(); stmt.execute("CREATE TABLE #SAfe0016(id INT, value DATETIME)"); SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); String[] dates = { "1983-10-30 02:00:00", "1983-10-30 01:59:59", "1940-06-14 23:59:59", "1911-03-11 00:51:39", "1911-03-11 00:51:38", "1900-01-01 01:00:00", "1900-01-01 00:59:59", "1900-01-01 00:09:21", "1900-01-01 00:09:20", "1753-01-01 00:00:00" }; // Insert the timestamps PreparedStatement pstmt = con.prepareStatement("INSERT INTO #SAfe0016 VALUES(?, ?)"); for (int i=0; i<dates.length; i++) { pstmt.setInt(1, i); pstmt.setString(2, dates[i]); pstmt.addBatch(); } int[] res = pstmt.executeBatch(); // Check that the insertion went ok assertEquals(dates.length, res.length); for (int i=0; i<dates.length; i++) { assertEquals(1, res[i]); } // Select the timestamps and make sure they are the same ResultSet rs = stmt.executeQuery( "SELECT value FROM #SAfe0016 ORDER BY id"); int counter = 0; while (rs.next()) { assertEquals(format.parse(dates[counter]), rs.getTimestamp(1)); ++counter; } // Close everything rs.close(); stmt.close(); pstmt.close(); } /** * Test bug #926620 - Too long value for VARCHAR field. 
*/ public void testCursorLargeCharInsert0017() throws Exception { Statement stmt = con.createStatement( ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_UPDATABLE); stmt.execute("CREATE TABLE #SAfe0017(value VARCHAR(10) PRIMARY KEY)"); // Create the updateable ResultSet ResultSet rs = stmt.executeQuery( "SELECT value FROM #SAfe0017"); // Try inserting a character string less than 10 characters long rs.moveToInsertRow(); rs.updateString(1, "Test"); rs.insertRow(); rs.moveToCurrentRow(); rs.last(); // Check that we do indeed have one row in the ResultSet now assertEquals(1, rs.getRow()); // Try inserting a character string more than 10 characters long rs.moveToInsertRow(); rs.updateString(1, "Testing: 1, 2, 3..."); try { rs.insertRow(); fail("Should cause an SQLException with native error number 8152" + "and SQL state 22001"); } catch (SQLException ex) { // assertEquals("22001", ex.getSQLState()); assertTrue(ex instanceof DataTruncation); } // Close everything rs.close(); stmt.close(); } /** * Test for bug [939206] TdsException: can't sent this BigDecimal */ public void testBigDecimal1() throws Exception { BigDecimal bigDecimal = new BigDecimal("1E+27"); Statement stmt = con.createStatement(); stmt.execute("create table #testBigDecimal1 (data decimal(28,0))"); stmt.close(); PreparedStatement pstmt = con.prepareStatement("insert into #testBigDecimal1 (data) values (?)"); pstmt.setBigDecimal(1, bigDecimal); assertTrue(pstmt.executeUpdate() == 1); pstmt.close(); pstmt = con.prepareStatement("select data from #testBigDecimal1"); ResultSet rs = pstmt.executeQuery(); assertTrue(rs.next()); assertTrue(bigDecimal.equals(rs.getBigDecimal(1))); assertTrue(!rs.next()); pstmt.close(); rs.close(); } }
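/*
 * A standalone sketch (not part of SAfeTest above) of the callable-statement pattern
 * that testCallableStatement0006 exercises with the {?=call ...} escape: slot 1 is the
 * procedure's return value, the later slots are IN and OUT parameters. The temporary
 * procedure name #sketch_proc and the Connection handed in are assumptions; only
 * standard java.sql calls are used.
 */
import java.sql.CallableStatement;
import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.Types;

public class CallableReturnValueSketch {

    static void run(Connection con) throws SQLException {
        try (Statement stmt = con.createStatement()) {
            // OUT parameter and RETURN value, but no result set, to keep the sketch small.
            stmt.execute("CREATE PROCEDURE #sketch_proc @p1 INT, @p2 VARCHAR(20) OUT AS "
                    + "SELECT @p2 = CAST(@p1 - 1 AS VARCHAR(20)) RETURN @p1 + 1");
        }

        try (CallableStatement cs = con.prepareCall("{?=call #sketch_proc(?,?)}")) {
            cs.registerOutParameter(1, Types.INTEGER); // return value
            cs.setInt(2, 13);                          // IN parameter
            cs.registerOutParameter(3, Types.VARCHAR); // OUT parameter
            cs.execute();

            System.out.println("return value:  " + cs.getInt(1));    // 14
            System.out.println("out parameter: " + cs.getString(3)); // "12"
        }
    }
}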
package net.sf.jaer.graphics; import java.awt.Color; import java.beans.PropertyChangeEvent; import java.beans.PropertyChangeListener; import java.util.HashMap; import java.util.logging.Logger; import javax.swing.JButton; import net.sf.jaer.chip.AEChip; import net.sf.jaer.event.BasicEvent; import net.sf.jaer.event.EventPacket; import net.sf.jaer.event.orientation.OrientationEventInterface; import net.sf.jaer.eventio.AEInputStream; import net.sf.jaer.util.SpikeSound; /** * Superclass for classes that render DVS and other sensor/chip AEs to a memory buffer so that they can be * painted on the screen. Note these classes do not actually render to the * graphics device; They take AEPacket's and render them to a pixmap memory * buffer that later gets painted by a ChipCanvas. The method chosen (by user * cycling method from GUI) chooses how the events are painted. In effect the * events are histogrammed for most rendering methods except for "color-time", * and even there they are histogrammed or averaged. For methods that render * polarized events (such as ON-OFF) then ON events increase the rendered value * while OFF events decreases it. Thus the rendered image fr can be drawn in 3-d * if desired and it will represent a histogram, although the default method * using for drawing the rendered frame is to paint the cell brightness. * * @author tobi * @see ChipRendererDisplayMethod */ public class AEChipRenderer extends Chip2DRenderer implements PropertyChangeListener { private boolean addedPropertyChangeListener = false; public boolean externalRenderer = false; /** * PropertyChange events */ public static final String EVENT_COLOR_SCALE_CHANGE = "colorScale"; /** * PropertyChange events */ public static final String EVENT_COLOR_MODE_CHANGE = "colorMode"; /** * @return the specialCount */ public int getSpecialCount() { return specialCount; } /** * @param specialCount * the specialCount to set */ public void setSpecialCount(int specialCount) { this.specialCount = specialCount; } public void incrementSpecialCount(int specialCountInc) { this.specialCount += specialCountInc; } public enum ColorMode { GrayLevel("Each event causes linear change in brightness"), Contrast("Each event causes multiplicative change in brightness to produce logarithmic scale"), RedGreen("ON events are green; OFF events are red"), ColorTime("Events are colored according to time within displayed slice, with red coding old events and green coding new events"), GrayTime("Events are colored according to time within displayed slice, with white coding old events and black coding new events"), HotCode("Events counts are colored blue to red, blue=0, red=full scale"), WhiteBackground("Events counts (unsigned) are dark on white background"), // ComplementaryFilter("Events are reconstructed using bandpass event filter") ; public String description; ColorMode(String description) { this.description = description; } @Override public String toString() { return super.toString() + ": " + description; } }; protected ColorMode[] colorModes = ColorMode.values(); // array of mode enums protected ColorMode colorMode; { ColorMode oldMode; try { oldMode = ColorMode.valueOf(prefs.get("ChipRenderer.colorMode", ColorMode.GrayLevel.name())); } catch (IllegalArgumentException e) { oldMode = ColorMode.GrayLevel; } for (ColorMode c : colorModes) { if (c == oldMode) { colorMode = c; } } } /** * perceptually separated hues - as estimated quickly by tobi */ protected static final int[] HUES = { 0, 36, 45, 61, 70, 100, 169, 188, 205, 229, 298, 318, }; /** * the 
number of rendering methods implemented */ public static int NUM_METHODS = 4; /** * number of colors used to represent time of event */ public static final int NUM_TIME_COLORS = 255; /** * chip shadows Chip2D's chip to declare it as AEChip */ protected AEChip chip; // protected AEPacket2D ae = null; protected EventPacket packet = null; /** * the chip rendered for */ protected boolean ignorePolarityEnabled = false; protected Logger log = Logger.getLogger("net.sf.jaer.graphics"); /** * The Colors that different cell types are painted. checkTypeColors should * populate this array. */ protected Color[] typeColors; /** * Used for rendering multiple cell types in different RGB colors. * checkTypeColors should populate this array of [numTypes][3] size. Each * 3-vector are the RGB color components for that cell type. */ protected float[][] typeColorRGBComponents; protected SpikeSound spikeSound; protected float step; // this is last step of RGB value used in rendering protected boolean stereoEnabled = false; protected int subsampleThresholdEventCount = prefs.getInt("ChipRenderer.subsampleThresholdEventCount", 50000); /** * determines subSampling of rendered events (for speed) */ protected boolean subsamplingEnabled = prefs.getBoolean("ChipRenderer.subsamplingEnabled", false); protected float[][] timeColors; protected int specialCount = 0; public AEChipRenderer(AEChip chip) { super(chip); if (chip == null) { throw new Error("tried to build ChipRenderer with null chip"); } setChip(chip); spikeSound = new SpikeSound(); timeColors = new float[NUM_TIME_COLORS][3]; float s = 1f / NUM_TIME_COLORS; for (int i = 0; i < NUM_TIME_COLORS; i++) { int rgb = Color.HSBtoRGB((0.66f * (NUM_TIME_COLORS - i)) / NUM_TIME_COLORS, 1f, 1f); Color c = new Color(rgb); float[] comp = c.getRGBColorComponents(null); timeColors[i][0] = comp[0]; timeColors[i][2] = comp[2]; timeColors[i][1] = comp[1]; // System.out.println(String.format("%.2f %.2f %.2f",comp[0],comp[1],comp[2])); } } /** * Overrides color scale setting to update the stored accumulated pixmap * when the color scale is changed. * * */ @Override synchronized public void setColorScale(int colorScale) { int old = this.colorScale; super.setColorScale(colorScale); if (old == this.colorScale) { return; } float r = (float) old / colorScale; // e.g. r=0.5 when scale changed from 1 to 2 if (pixmap == null) { return; } float[] f = getPixmapArray(); switch (colorMode) { case GrayLevel: case Contrast: // colorScale=1,2,3; step = 1, 1/2, 1/3, 1/4, ; // later type-grayValue gives -.5 or .5 for spike value, when // multipled gives steps of 1/2, 1/3, 1/4 to end up with 0 or 1 when colorScale=1 and you have one event for (int i = 0; i < f.length; i += 3) { final float g = 0.5f; float d = f[i] - g; d = d * r; f[i] = d + g; f[i + 1] = d + g; f[i + 2] = d + g; } break; case RedGreen: for (int i = 0; i < f.length; i += 3) { f[i] = f[i] * r; f[i + 1] = f[i + 1] * r; } break; default: // rendering method unknown, reset to default value log.warning("colorMode " + colorMode + " unknown, reset to default value 0"); setColorMode(ColorMode.GrayLevel); } getSupport().firePropertyChange(EVENT_COLOR_SCALE_CHANGE, old, colorScale); } /** * Does the rendering using selected method. 
* * @param packet * a packet of events (already extracted from raw events) * @see #setColorMode */ public synchronized void render(EventPacket packet) { if (!addedPropertyChangeListener) { if (chip instanceof AEChip) { AEChip aeChip = chip; if (aeChip.getAeViewer() != null) { aeChip.getAeViewer().addPropertyChangeListener(this); addedPropertyChangeListener = true; } } } if (packet == null) { return; } this.packet = packet; int numEvents = packet.getSize(); int skipBy = 1; if (isSubsamplingEnabled()) { while ((numEvents / skipBy) > getSubsampleThresholdEventCount()) { skipBy++; } } checkPixmapAllocation(); float[] f = getPixmapArray(); float a; resetSelectedPixelEventCount(); // init it for this packet boolean ignorePolarity = isIgnorePolarityEnabled(); setSpecialCount(0); try { if (packet.getNumCellTypes() > 2) { checkTypeColors(packet.getNumCellTypes()); if (!accumulateEnabled && !externalRenderer) { resetFrame(0); } step = 1f / (colorScale); for (Object obj : packet) { BasicEvent e = (BasicEvent) obj; if (e.isSpecial()) { setSpecialCount(specialCount + 1); // TODO optimize special count increment continue; } int type = e.getType(); if ((e.x == xsel) && (e.y == ysel)) { playSpike(type); } int ind = getPixMapIndex(e.x, e.y); // float[] f = fr[e.y][e.x]; // setPixmapPosition(e.x, e.y); float[] c = typeColorRGBComponents[type]; if ((obj instanceof OrientationEventInterface) && (((OrientationEventInterface) obj).isHasOrientation() == false)) { // if event is orientation event but orientation was not set, just draw as gray level f[ind] += step; // if(f[0]>1f) f[0]=1f; f[ind + 1] += step; // if(f[1]>1f) f[1]=1f; f[ind + 2] += step; // if(f[2]>1f) f[2]=1f; } else if (colorScale > 1) { f[ind] += c[0] * step; // if(f[0]>1f) f[0]=1f; f[ind + 1] += c[1] * step; // if(f[1]>1f) f[1]=1f; f[ind + 2] += c[2] * step; // if(f[2]>1f) f[2]=1f; } else { // if color scale is 1, then last value is used as the pixel value, which quantizes the color to // full scale. f[ind] = c[0]; // if(f[0]>1f) f[0]=1f; f[ind + 1] = c[1]; // if(f[1]>1f) f[1]=1f; f[ind + 2] = c[2]; // if(f[2]>1f) f[2]=1f; } } } else { switch (colorMode) { case GrayLevel: if (!accumulateEnabled && !externalRenderer) { resetFrame(.5f); // also sets grayValue } step = 2f / (colorScale + 1); // colorScale=1,2,3; step = 1, 1/2, 1/3, 1/4, ; // later type-grayValue gives -.5 or .5 for spike value, when // multipled gives steps of 1/2, 1/3, 1/4 to end up with 0 or 1 when colorScale=1 and you have // one event for (Object obj : packet) { BasicEvent e = (BasicEvent) obj; int type = e.getType(); if (e.isSpecial()) { setSpecialCount(specialCount + 1); // TODO optimate special count increment continue; } if ((e.x == xsel) && (e.y == ysel)) { playSpike(type); } int ind = getPixMapIndex(e.x, e.y); a = f[ind]; if (!ignorePolarity) { a += step * (type - grayValue); // type-.5 = -.5 or .5; step*type= -.5, .5, (cs=1) or // -.25, .25 (cs=2) etc. } else { a += step * (1 - grayValue); // type-.5 = -.5 or .5; step*type= -.5, .5, (cs=1) or -.25, // .25 (cs=2) etc. 
} f[ind] = a; f[ind + 1] = a; f[ind + 2] = a; } break; case Contrast: if (!accumulateEnabled && !externalRenderer) { resetFrame(.5f); } float eventContrastRecip = 1 / eventContrast; for (Object obj : packet) { BasicEvent e = (BasicEvent) obj; int type = e.getType(); if (e.isSpecial()) { setSpecialCount(specialCount + 1); // TODO optimate special count increment continue; } if ((e.x == xsel) && (e.y == ysel)) { playSpike(type); } int ind = getPixMapIndex(e.x, e.y); a = f[ind]; switch (type) { case 0: a *= eventContrastRecip; // off cell divides gray break; case 1: a *= eventContrast; // on multiplies gray } f[ind] = a; f[ind + 1] = a; f[ind + 2] = a; } break; case RedGreen: if (!accumulateEnabled && !externalRenderer) { resetFrame(0); } step = 1f / (colorScale); // cs=1, step=1, cs=2, step=.5 for (Object obj : packet) { BasicEvent e = (BasicEvent) obj; int type = e.getType(); if (e.isSpecial()) { setSpecialCount(specialCount + 1); // TODO optimate special count increment continue; } // System.out.println("x: " + e.x + " y:" + e.y); if ((e.x == xsel) && (e.y == ysel)) { playSpike(type); } int ind = getPixMapIndex(e.x, e.y); f[ind + type] += step; } break; case ColorTime: if (!accumulateEnabled && !externalRenderer) { resetFrame(0); } if (numEvents == 0) { return; } int ts0 = packet.getFirstTimestamp(); float dt = packet.getDurationUs(); step = 1f / (colorScale); // cs=1, step=1, cs=2, step=.5 for (Object obj : packet) { BasicEvent e = (BasicEvent) obj; int type = e.getType(); if (e.isSpecial()) { setSpecialCount(getSpecialCount() + 1); // TODO optimate special count increment continue; } if ((e.x == xsel) && (e.y == ysel)) { playSpike(type); } int index = getPixMapIndex(e.x, e.y); int ind = (int) Math.floor(((NUM_TIME_COLORS - 1) * (e.timestamp - ts0)) / dt); if (ind < 0) { ind = 0; } else if (ind >= timeColors.length) { ind = timeColors.length - 1; } if (colorScale > 1) { for (int c = 0; c < 3; c++) { f[index + c] += timeColors[ind][c] * step; } } else { f[index] = timeColors[ind][0]; f[index + 1] = timeColors[ind][1]; f[index + 2] = timeColors[ind][2]; } } break; default: // rendering method unknown, reset to default value log.warning("colorMode " + colorMode + " unknown, reset to default value 0"); setColorMode(ColorMode.GrayLevel); } } autoScaleFrame(f); } catch (ArrayIndexOutOfBoundsException e) { if ((chip.getFilterChain() != null) && (chip.getFilterChain().getProcessingMode() != net.sf.jaer.eventprocessing.FilterChain.ProcessingMode.ACQUISITION)) { // only // print // real-time // mode // has // not // invalidated // the // packet // are // trying // render e.printStackTrace(); log.warning(e.toString() + ": ChipRenderer.render(), some event out of bounds for this chip type?"); } } pixmap.rewind(); } /** * Autoscales frame data so that max value is 1. If autoscale is disabled, * then values are just clipped to 0-1 range. If autoscale is enabled, then * gray is mapped back to gray and following occurs: * <p> * Global normalizer is tricky because we want to map max value to 1 OR min * value to 0, whichever is greater magnitude, max or min. ALSO, max and min * are distances from gray level in positive and negative directions. After * global normalizer is computed, all values are divided by normalizer in * order to keep gray level constant. 
* * @param fr * the frame rgb data [y][x][rgb] * @param gray * the gray level */ protected void autoScaleFrame(float[][][] fr, float gray) { if (!autoscaleEnabled) { return; } { // compute min and max values and divide to keep gray level constant // float[] mx={Float.MIN_VALUE,Float.MIN_VALUE,Float.MIN_VALUE}, // mn={Float.MAX_VALUE,Float.MAX_VALUE,Float.MAX_VALUE}; float max = Float.NEGATIVE_INFINITY, min = Float.POSITIVE_INFINITY; // max=max-.5f; // distance of max from gray // min=.5f-min; // distance of min from gray for (float[][] element : fr) { for (float[] element2 : element) { for (int k = 0; k < 3; k++) { float f = element2[k] - gray; if (f > max) { max = f; } else if (f < min) { min = f; } } } } // global normalizer here // this is tricky because we want to map max value to 1 OR min value to 0, whichever is greater magnitude, // max or min // ALSO, max and min are distances from gray level in positive and negative directions float m, b = gray; // slope/intercept of mapping function if (max == min) { return; // if max==min then no need to normalize or do anything, just paint gray } if (max > -min) { // map max to 1, gray to gray m = (1 - gray) / (max); b = gray - (gray * m); } else { // map min to 0, gray to gray m = gray / (-min); b = gray - (gray * m); } // float norm=(float)Math.max(Math.abs(max),Math.abs(min)); // norm is max distance from gray level // System.out.println("norm="+norm); if (colorMode != ColorMode.Contrast) { autoScaleValue = Math.round(Math.max(max, -min) / step); // this is value shown to user, step was // computed during rendering to be (usually) // 1/colorScale } else { if (max > -min) { autoScaleValue = 1; // this is value shown to user, step was computed during rendering to be // (usually) 1/colorScale } else { autoScaleValue = -1; // this is value shown to user, step was computed during rendering to be // (usually) 1/colorScale } } // normalize all channels for (int i = 0; i < fr.length; i++) { for (int j = 0; j < fr[i].length; j++) { for (int k = 0; k < 3; k++) { float f = fr[i][j][k]; float f2 = (m * f) + b; if (f2 < 0) { f2 = 0; } else if (f2 > 1) { f2 = 1; // shouldn't need this } fr[i][j][k] = f2; } } } } } /** * autoscales frame data so that max value is 1. If autoscale is disabled, * then values are just clipped to 0-1 range. If autoscale is enabled, then * gray is mapped back to gray and following occurs: * <p> * Global normalizer is tricky because we want to map max value to 1 OR min * value to 0, whichever is greater magnitude, max or min. ALSO, max and min * are distances from gray level in positive and negative directions. After * global normalizer is computed, all values are divided by normalizer in * order to keep gray level constant. 
* * @param fr * the frame rgb data pixmap */ protected void autoScaleFrame(float[] fr) { if (!autoscaleEnabled) { return; } // compute min and max values and divide to keep gray level constant // float[] mx={Float.MIN_VALUE,Float.MIN_VALUE,Float.MIN_VALUE}, // mn={Float.MAX_VALUE,Float.MAX_VALUE,Float.MAX_VALUE}; float max = Float.NEGATIVE_INFINITY, min = Float.POSITIVE_INFINITY; // max=max-.5f; // distance of max from gray // min=.5f-min; // distance of min from gray for (float element : fr) { float f = element - grayValue; if (f > max) { max = f; } else if (f < min) { min = f; } } // global normalizer here // this is tricky because we want to map max value to 1 OR min value to 0, whichever is greater magnitude, max // or min // ALSO, max and min are distances from gray level in positive and negative directions float m, b = grayValue; // slope/intercept of mapping function if (max == min) { return; // if max==min then no need to normalize or do anything, just paint gray } if (max > -min) { // map max to 1, gray to gray m = (1 - grayValue) / (max); b = grayValue - (grayValue * m); } else { // map min to 0, gray to gray m = grayValue / (-min); b = grayValue - (grayValue * m); } if (colorMode != ColorMode.Contrast) { autoScaleValue = Math.round(Math.max(max, -min) / step); // this is value shown to user, step was computed // during rendering to be (usually) 1/colorScale } else { if (max > -min) { autoScaleValue = 1; // this is value shown to user, step was computed during rendering to be (usually) // 1/colorScale } else { autoScaleValue = -1; // this is value shown to user, step was computed during rendering to be (usually) // 1/colorScale } } // normalize all channels for (int i = 0; i < fr.length; i++) { fr[i] = (m * fr[i]) + b; } } private HashMap<Integer, float[][]> typeColorsMap = new HashMap<Integer, float[][]>(); /** * Creates colors for each cell type (e.g. orientation) so that they are * spread over hue space in a manner to attempt to be maximally different in * hue. * * <p> * Subclasses can override this method to customize the colors drawn but the * subclasses should check if the color have been created since * checkTypeColors is called on every rendering cycle. This method should * first check if typeColorRGBComponents already exists and has the correct * number of elements. If not, allocate and populate typeColorRGBComponents * so that type t corresponds to typeColorRGBComponents[t][0] for red, * typeColorRGBComponents[t][1] for green, and typeColorRGBComponents[t][3] * for blue. It should also populate the Color[] typeColors. 
* * new code should use the #makeTypeColors method which caches the colors in a HashMap by numbers of cell types * * @param numCellTypes the number of colors to generate * @see #typeColors * @see #typeColorRGBComponents */ protected void checkTypeColors(int numCellTypes) { if ((typeColorRGBComponents == null) || (typeColorRGBComponents.length != numCellTypes)) { typeColorRGBComponents = new float[numCellTypes][3]; setTypeColors(new Color[numCellTypes]); StringBuffer b = new StringBuffer("cell type rendering colors (type: rgb):\n"); for (int i = 0; i < typeColorRGBComponents.length; i++) { int hueIndex = (int) Math.floor(((float) i / typeColorRGBComponents.length) * HUES.length); // float hue=(float)(numCellTypes-i)/(numCellTypes); float hue = HUES[hueIndex] / 255f; // hue=hue*hue; // Color c=space.fromCIEXYZ(comp); Color c = Color.getHSBColor(hue, 1, 1); getTypeColors()[i] = c; typeColorRGBComponents[i][0] = (float) c.getRed() / 255; typeColorRGBComponents[i][1] = (float) c.getGreen() / 255; typeColorRGBComponents[i][2] = (float) c.getBlue() / 255; JButton but = new JButton(" "); // TODO why is this button here? maybe to be used by some subclasses or // users? but.setBackground(c); but.setForeground(c); b.append(String.format("type %d: %.2f, %.2f, %.2f\n", i, typeColorRGBComponents[i][0], typeColorRGBComponents[i][1], typeColorRGBComponents[i][2])); } log.info(b.toString()); } } /** * Creates colors for each cell type (e.g. orientation) so that they are * spread over hue space in a manner to attempt to be maximally different in * hue. * <p> * Subclasses can override this method to customize the colors drawn but the * subclasses should check if the color have been created since * checkTypeColors is called on every rendering cycle. This method should * first check if typeColorRGBComponents already exists and has the correct * number of elements. If not, allocate and populate typeColorRGBComponents * so that type t corresponds to typeColorRGBComponents[t][0] for red, * typeColorRGBComponents[t][1] for green, and typeColorRGBComponents[t][3] * for blue. It should also populate the Color[] typeColors. 
    /**
     * Creates colors for each cell type (e.g. orientation) so that they are spread over hue space in a way that
     * attempts to make them maximally different in hue, and caches the palette per number of cell types.
     *
     * @param numCellTypes
     *            the number of colors to generate
     * @return the float[][] of colors; each row is an RGB color triplet, with components in the 0-1 float range, for
     *         a particular cell type
     * @see #typeColors
     * @see #typeColorRGBComponents
     */
    public float[][] makeTypeColors(int numCellTypes) {
        float[][] colors = typeColorsMap.get(numCellTypes);
        if (colors == null) {
            colors = new float[numCellTypes][3];
            setTypeColors(new Color[numCellTypes]);
            for (int i = 0; i < numCellTypes; i++) {
                int hueIndex = (int) Math.floor(((float) i / numCellTypes) * HUES.length);
                float hue = HUES[hueIndex] / 255f;
                Color c = Color.getHSBColor(hue, 1, 1);
                colors[i][0] = (float) c.getRed() / 255;
                colors[i][1] = (float) c.getGreen() / 255;
                colors[i][2] = (float) c.getBlue() / 255;
            }
            typeColorsMap.put(numCellTypes, colors);
        }
        return colors;
    }

    /**
     * Cycles to the next color rendering method.
     */
    public synchronized void cycleColorMode() {
        int m = colorMode.ordinal();
        if (++m >= colorModes.length) {
            m = 0;
        }
        setColorMode(colorModes[m]); // setColorMode also stores the preference
    }

    /**
     * Returns the last packet that was rendered.
     *
     * @return the last packet that was rendered
     */
    public EventPacket getPacket() {
        return packet;
    }

    public void setChip(AEChip chip) {
        this.chip = chip;
    }

    public AEChip getChip() {
        return chip;
    }

    public ColorMode getColorMode() {
        return colorMode;
    }

    public int getSubsampleThresholdEventCount() {
        return subsampleThresholdEventCount;
    }

    public boolean isIgnorePolarityEnabled() {
        return ignorePolarityEnabled;
    }

    protected boolean isMethodMonochrome() {
        return (colorMode == ColorMode.GrayLevel) || (colorMode == ColorMode.Contrast);
    }

    public boolean isStereoEnabled() {
        return stereoEnabled;
    }

    public boolean isSubsamplingEnabled() {
        return subsamplingEnabled;
    }

    /**
     * Plays a single spike click and increments the selectedPixelEventCount counter.
     *
     * @param type
     *            0 to play left, 1 to play right
     */
    protected void playSpike(int type) {
        spikeSound.play(type);
        selectedPixelEventCount++;
    }

    /**
     * Sets whether an external renderer adds data to the array and resets it.
     *
     * @param extRender
     *            true if an external renderer is used
     */
    public void setExternalRenderer(boolean extRender) {
        externalRenderer = extRender;
    }

    /**
     * Sets whether to ignore event polarity when rendering, so that all event types increase brightness.
     *
     * @param ignorePolarityEnabled
     *            true to ignore polarity
     */
    public void setIgnorePolarityEnabled(boolean ignorePolarityEnabled) {
        this.ignorePolarityEnabled = ignorePolarityEnabled;
    }
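    /*
     * Usage sketch (illustrative only): the palette returned by makeTypeColors above is cached per cell-type count,
     * so repeated calls with the same argument return the same array instance. The variable names below are
     * hypothetical.
     *
     *   float[][] palette = renderer.makeTypeColors(chip.getNumCellTypes());
     *   float[] rgbForTypeZero = palette[0]; // {r, g, b}, each component in the 0-1 float range
     */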
    /**
     * Sets the color rendering method and stores the preference.
     *
     * @param colorMode
     *            the rendering method, e.g. gray, red/green opponency, time encoded
     */
    public synchronized void setColorMode(ColorMode colorMode) {
        ColorMode old = this.colorMode;
        this.colorMode = colorMode;
        prefs.put("ChipRenderer.colorMode", colorMode.name());
        log.info(this.getClass().getSimpleName() + ": colorMode=" + colorMode);
        getSupport().firePropertyChange(EVENT_COLOR_MODE_CHANGE, old, colorMode);
    }

    public void setStereoEnabled(boolean stereoEnabled) {
        this.stereoEnabled = stereoEnabled;
    }

    public void setSubsampleThresholdEventCount(int subsampleThresholdEventCount) {
        prefs.putInt("ChipRenderer.subsampleThresholdEventCount", subsampleThresholdEventCount);
        this.subsampleThresholdEventCount = subsampleThresholdEventCount;
    }

    public void setSubsamplingEnabled(boolean subsamplingEnabled) {
        this.subsamplingEnabled = subsamplingEnabled;
        prefs.putBoolean("ChipRenderer.subsamplingEnabled", subsamplingEnabled);
    }

    /**
     * @see AEChipRenderer#typeColorRGBComponents
     * @return a 2-d float array of color components; each row of the array is a 3-vector of RGB color components for
     *         rendering a particular cell type
     */
    public float[][] getTypeColorRGBComponents() {
        checkTypeColors(chip.getNumCellTypes()); // cheap if the colors already exist
        return typeColorRGBComponents;
    }

    /**
     * @see AEChipRenderer#typeColorRGBComponents
     */
    public void setTypeColorRGBComponents(float[][] typeColors) {
        this.typeColorRGBComponents = typeColors;
    }

    /**
     * @see AEChipRenderer#typeColors
     */
    public Color[] getTypeColors() {
        return typeColors;
    }

    /**
     * @see AEChipRenderer#typeColors
     */
    public void setTypeColors(Color[] typeColors) {
        this.typeColors = typeColors;
    }

    @Override
    public void propertyChange(PropertyChangeEvent pce) {
        // compare property names by value, not by reference
        if (AEInputStream.EVENT_REWOUND.equals(pce.getPropertyName())) {
            resetFrame(grayValue);
        }
    }
}
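/*
 * Usage sketch (illustrative only, not part of the original file): a client could subscribe to the color mode
 * changes fired by setColorMode, assuming getSupport() exposes the PropertyChangeSupport used above and that the
 * enclosing class is AEChipRenderer as the @see tags suggest. The listener body is hypothetical.
 *
 *   renderer.getSupport().addPropertyChangeListener(AEChipRenderer.EVENT_COLOR_MODE_CHANGE,
 *       evt -> System.out.println("color mode is now " + evt.getNewValue()));
 */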